diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 000000000..6b6220572
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,91 @@
+version: 2.1
+
+
+orbs:
+  orb: invocations/orb@1.0.4
+
+
+jobs:
+  sdist-test-suite:
+    executor:
+      name: orb/default
+      version: "3.6"
+    steps:
+      - orb/setup
+      - run: inv release.build --no-wheel --directory .
+      - run: |
+          cd dist
+          tar xzvf *.tar.gz
+          rm -v *.tar.gz
+          cd paramiko-*
+          pip install -e .
+          inv -e test
+      - orb/debug
+
+  kerberos:
+    executor:
+      name: orb/default
+      version: "3.6"
+    steps:
+      - orb/setup
+      # Required to actually see all of universe/multiverse :(
+      - run: sudo apt update
+      # System reqs to install/build gssapi c-ext & friends (who only
+      # appear to offer wheels for Windows)
+      - run: sudo apt install -y libkrb5-dev krb5-admin-server krb5-kdc
+      # Our gssapi-supporting flavor, eg gssapi, pyasn1 etc
+      - run: pip install -e '.[gssapi]'
+      # Test-only deps for Kerberos (if they are importable it triggers
+      # running the kerberos tests instead of skipping them)
+      - run: pip install k5test
+      # Do the thing, win the points!
+      - run: inv test
+      - orb/debug
+
+
+workflows:
+  main:
+    jobs:
+      # The basics
+      - orb/lint:
+          name: Lint
+      - orb/format:
+          name: Style check
+      # Main test run, w/ coverage, and latest-supported cryptography
+      - orb/coverage:
+          name: Test 3.6 (w/ coverage, latest crypto)
+      # Non-coverage runs w/ other crypto versions.
+      # (Phrased as 2-dimensional matrix but 3.6 only for now to save credits)
+      - orb/test:
+          name: Test << matrix.version >> w/ << matrix.pip-overrides >>
+          matrix:
+            parameters:
+              version: ["3.6"]
+              # TODO: I don't see a nicer way to do this that doesn't require
+              # making the orb know too much about its client code...
+              pip-overrides: ["cryptography==2.5", "cryptography==3.4"]
+      # Kerberos tests. Currently broken :(
+      #- kerberos:
+      #    name: Test 3.6 w/ Kerberos support
+      #    # No point testing k5 if base tests already fail
+      #    requires: ["Test 3.6 (w/ coverage, latest crypto)"]
+      - orb/test-release:
+          name: Release test
+      # Ensure test suite is included in sdist & functions appropriately
+      - sdist-test-suite:
+          name: Test within sdist
+          requires:
+            - "Test 3.6 (w/ coverage, latest crypto)"
+            - "Release test"
+      # Test other interpreters if main passed
+      - orb/test:
+          name: Test << matrix.version >>
+          requires: ["Test 3.6 (w/ coverage, latest crypto)"]
+          matrix:
+            parameters:
+              version: ["3.7", "3.8", "3.9"]
+      # Test doc building if main test suite passed (no real reason to spend
+      # all those credits if the main tests would also fail...)
+      - orb/docs:
+          name: "Docs"
+          requires: ["Test 3.6 (w/ coverage, latest crypto)"]
diff --git a/.codecov.yml b/.codecov.yml
new file mode 100644
index 000000000..41ab9cc4a
--- /dev/null
+++ b/.codecov.yml
@@ -0,0 +1,3 @@
+comment: false
+coverage:
+  precision: 0
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 000000000..47b6f4c3d
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,5 @@
+[run]
+branch = True
+include =
+    paramiko/*
+    tests/*
diff --git a/.flake8 b/.flake8
new file mode 100644
index 000000000..1317fd523
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,9 @@
+[flake8]
+exclude = sites,.git,build,dist,demos
+# NOTE: W503, E203 are concessions to black 18.0b5 and could be reinstated
+# later if fixed on that end.
+# NOTE: E722 seems to only have started popping up on move to flake8 3.6.0 from
+# 2.4.0. Not sure why, bare excepts have been a (regrettable) thing forever...
+ignore = E124,E125,E128,E261,E301,E302,E303,E402,E721,W503,E203,E722
+max-line-length = 79
+
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 000000000..30ce38c88
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,3 @@
+# These are supported funding model platforms
+
+tidelift: "pypi/paramiko"
diff --git a/.gitignore b/.gitignore
index 44b45974b..c6a6c4947 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,9 +3,12 @@ build/
 dist/
 .tox/
 paramiko.egg-info/
-test.log
 docs/
 demos/*.log
 !sites/docs
 _build
 .coverage
+.cache
+.idea
+coverage.xml
+htmlcov
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 000000000..1f6737f48
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,19 @@
+version: 2
+
+formats: all
+
+build:
+  # Specific version of RTD docker image that's new enough to have Rust around
+  # (so Cryptography can build from source if needed).
+  # See https://github.com/readthedocs/readthedocs-docker-images
+  image: "7.0"
+
+python:
+  version: 3.7
+  install:
+    - requirements: dev-requirements.txt
+  system_packages: false
+
+# NOTE: leaving sphinx.* alone as there's currently no way to differ the conf
+# file used based on project (so cannot differentiate between www and docs).
+# The web UI setting for that SHOULD take precedence?
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 3b7b2b42b..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-language: python
-sudo: false
-cache:
-  directories:
-    - $HOME/.cache/pip
-python:
-  - "2.6"
-  - "2.7"
-  - "3.3"
-  - "3.4"
-  - "3.5"
-install:
-  # Self-install for setup.py-driven deps
-  - pip install -e .
-  # Dev (doc/test running) requirements
-  - pip install coveralls # For coveralls.io specifically
-  - pip install -r dev-requirements.txt
-script:
-  # Main tests, w/ coverage!
-  - inv test --coverage
-  # Ensure documentation & invoke pipeline run OK.
-  # Run 'docs' first since its objects.inv is referred to by 'www'.
-  # Also force warnings to be errors since most of them tend to be actual
-  # problems.
-  - invoke docs -o -W www -o -W
-notifications:
-  irc:
-    channels: "irc.freenode.org#paramiko"
-    template:
-      - "%{repository}@%{branch}: %{message} (%{build_url})"
-    on_success: change
-    on_failure: change
-  email: false
-after_success:
-  - coveralls
diff --git a/ChangeLog.0 b/ChangeLog.0
deleted file mode 100644
index c151d251e..000000000
--- a/ChangeLog.0
+++ /dev/null
@@ -1,42 +0,0 @@
-
-2003-08-24:
- * implemented the other hashes: all 4 from the draft are working now
- * added 'aes128-cbc' and '3des-cbc' cipher support
- * fixed channel eof/close semantics
-2003-09-12: version "aerodactyl"
- * implemented group-exchange kex ("kex-gex")
- * implemented RSA/DSA private key auth
-2003-09-13:
- * fixed inflate_long and deflate_long to handle negatives, even though
-   they're never used in the current ssh protocol
-2003-09-14:
- * fixed session_id handling: re-keying works now
- * added the ability for a Channel to have a fileno() for select/poll
-   purposes, although this will cause worse window performance if the
-   client app isn't careful
-2003-09-16: version "bulbasaur"
- * fixed pipe (fileno) method to be nonblocking and it seems to work now
- * fixed silly bug that caused large blocks to be truncated
-2003-10-08:
- * patch to fix Channel.invoke_subsystem and add Channel.exec_command
-   [vaclav dvorak]
- * patch to add Channel.sendall [vaclav dvorak]
- * patch to add Channel.shutdown [vaclav dvorak]
- * patch to add Channel.makefile and a ChannelFile class which emulates
-   a python file object [vaclav dvorak]
-2003-10-26:
- * thread creation no longer happens during construction -- use the new
-   method "start_client(event)" to get things rolling
- * re-keying now takes place after 1GB of data or 1 billion packets
-   (these limits can be easily changed per-session if needed)
-2003-11-06:
- * added a demo server and host key
-2003-11-09:
- * lots of changes to server mode
- * ChannelFile supports universal newline mode; fixed readline
- * fixed a bug with parsing the remote banner
-2003-11-10: version "charmander"
- * renamed SSHException -> SecshException
- * cleaned up server mode and the demo server
-
-*** for all subsequent changes, please see 'tla changelog'.
diff --git a/ChangeLog.1 b/ChangeLog.1
deleted file mode 100644
index 2fdae5a0f..000000000
--- a/ChangeLog.1
+++ /dev/null
@@ -1,2928 +0,0 @@
-# do not edit -- automatically generated by arch changelog
-# arch-tag: automatic-ChangeLog--robey@lag.net--2003-public/secsh--dev--1.0
-#
-
-2005-04-18 00:53:57 GMT Robey Pointer patch-164
-
-    Summary:
-      fix some docs
-    Revision:
-      secsh--dev--1.0--patch-164
-
-    remove some epydoc comments about fileno() being non-portable.
-
-    modified files:
-     paramiko/channel.py
-
-
-2005-04-18 00:30:52 GMT Robey Pointer patch-163
-
-    Summary:
-      add SFTPClient.close()
-    Revision:
-      secsh--dev--1.0--patch-163
-
-    add SFTPClient.close() and add a simple little unit test for it.
-
-
-    modified files:
-     paramiko/sftp_client.py tests/test_sftp.py
-
-
-2005-04-18 00:11:34 GMT Robey Pointer patch-162
-
-    Summary:
-      avoid os.environ['HOME'] in the demos
-    Revision:
-      secsh--dev--1.0--patch-162
-
-    avoid using os.environ['HOME'], which will never work on windows, and
-    use os.path.expanduser() instead. it's semi-moot because windows doesn't
-    have a standard location for ssh files, but i think paramiko should set a
-    good example anyway.
- - modified files: - demo.py demo_simple.py - - -2005-04-16 23:38:22 GMT Robey Pointer patch-161 - - Summary: - integrated laptop work (test commit) - Revision: - secsh--dev--1.0--patch-161 - - Patches applied: - - * robey@lag.net--2003-public-master-shake/secsh--dev--1.0--base-0 - tag of robey@lag.net--2003-public/secsh--dev--1.0--patch-160 - - * robey@lag.net--2003-public-master-shake/secsh--dev--1.0--patch-1 - test commit - - * robey@lag.net--2003-public/secsh--dev--1.0--base-0 - initial import - - * robey@lag.net--2003-public/secsh--dev--1.0--patch-1 - no changes - - - modified files: - README paramiko/server.py - - new patches: - robey@lag.net--2003-public-master-shake/secsh--dev--1.0--base-0 - robey@lag.net--2003-public-master-shake/secsh--dev--1.0--patch-1 - - -2005-04-10 00:46:41 GMT Robey Pointer patch-160 - - Summary: - 1.3 marowak - Revision: - secsh--dev--1.0--patch-160 - - bump version to 1.3 / marowak - - modified files: - Makefile README paramiko/__init__.py paramiko/transport.py - setup.py - - -2005-04-10 00:39:18 GMT Robey Pointer patch-159 - - Summary: - clean up SFTPAttributes.__repr__ - Revision: - secsh--dev--1.0--patch-159 - - clean up SFTPAttributes repr() a bit. - - modified files: - paramiko/sftp_attr.py - - -2005-04-10 00:13:54 GMT Robey Pointer patch-158 - - Summary: - remove ChangeLog from MANIFEST.in - Revision: - secsh--dev--1.0--patch-158 - - remove ChangeLog from the dist list. - - modified files: - MANIFEST.in - - -2005-04-06 07:24:28 GMT Robey Pointer patch-157 - - Summary: - change SubsystemHandler/SFTPServerInterface API - Revision: - secsh--dev--1.0--patch-157 - - change the API of SubsystemHandler to accept a reference to the - ServerInstance object during construction. this will break all code - that currently creates subsystem handlers (like sftp servers) -- sorry! - - lots of little doc fixups (mostly indenting). - - modified files: - paramiko/server.py paramiko/sftp_server.py paramiko/sftp_si.py - paramiko/transport.py tests/stub_sftp.py - - -2005-03-26 05:53:00 GMT Robey Pointer patch-156 - - Summary: - rewrite channel pipes to work on windows - Revision: - secsh--dev--1.0--patch-156 - - the pipe system i was using for simulating an os-level FD (for select) was - retarded. i realized this week that i could just use a single byte in the - pipe to signal "data is ready" and not try to feed all incoming data thru - the pipe -- and then i don't have to try to make the pipe non-blocking (which - should make it work on windows). a lot of duplicate code got removed and now - it's all going thru the same code-path on read. - - there's still a slight penalty on incoming feeds and calling 'recv' when a - pipe has been opened (by calling 'fileno'), but it's tiny. - - removed a bunch of documentation and comments about things not working on - windows, since i think they probably do now. - - - removed files: - .arch-ids/demo_windows.py.id demo_windows.py - - modified files: - MANIFEST.in README paramiko/channel.py - - -2005-03-25 20:06:56 GMT Robey Pointer patch-155 - - Summary: - fix sending of large sftp packet sizes - Revision: - secsh--dev--1.0--patch-155 - - fix a bug where packets larger than about 12KB would cause the session to - die on platforms other than osx. turns out that on most platforms, setting a - socket timeout also causes timeouts to occur on writes (but not on osx). so - on a huge write, once the os buffers were full, paramiko would get a - socket.timeout exception when writing, and bail. 
- - since the timeout is primarily so we can periodically poll to see if the - session has been killed from elsewhere, do that on a timeout but otherwise - continue trying to write. large packet sizes (in sftp) should now work. - - modified files: - paramiko/transport.py - - -2005-02-28 08:06:08 GMT Robey Pointer patch-154 - - Summary: - even better 1.2 lapras - Revision: - secsh--dev--1.0--patch-154 - - re-bump the version # to 1.2 (with a new date since i added more stuff). - add 2005 to the copyright date in a bunch of files. - - - modified files: - Makefile README demo.py demo_server.py demo_simple.py - demo_windows.py forward.py paramiko/__init__.py - paramiko/auth_transport.py paramiko/ber.py paramiko/channel.py - paramiko/common.py paramiko/dsskey.py paramiko/file.py - paramiko/kex_gex.py paramiko/kex_group1.py - paramiko/logging22.py paramiko/message.py paramiko/pkey.py - paramiko/primes.py paramiko/rsakey.py paramiko/server.py - paramiko/sftp.py paramiko/sftp_attr.py paramiko/sftp_client.py - paramiko/sftp_handle.py paramiko/sftp_server.py - paramiko/sftp_si.py paramiko/ssh_exception.py - paramiko/transport.py paramiko/util.py setup.py test.py - tests/loop.py tests/stub_sftp.py tests/test_file.py - tests/test_kex.py tests/test_message.py tests/test_pkey.py - tests/test_sftp.py tests/test_transport.py - - -2005-02-28 07:49:56 GMT Robey Pointer patch-153 - - Summary: - tweak sftp_file write behavior on large blocks of data - Revision: - secsh--dev--1.0--patch-153 - - BufferedFile.write() wasn't correctly dealing with the possibility that the - underlying write might not write the entire data block at once (even though - the docs said it would). now that it's working, make sftp_file take - advantage of it in order to chop up blocks larger than 32kB (the max allowed - on sftp) and add a unit test for it. - - - modified files: - paramiko/file.py paramiko/sftp_file.py tests/test_sftp.py - - -2005-02-28 07:17:21 GMT Robey Pointer patch-152 - - Summary: - little doc fixes - Revision: - secsh--dev--1.0--patch-152 - - stupid little doc fixups that didn't fit with the other patches. - - modified files: - paramiko/auth_transport.py tests/loop.py - - -2005-02-28 07:16:22 GMT Robey Pointer patch-151 - - Summary: - fix race in transport thread startup - Revision: - secsh--dev--1.0--patch-151 - - set active=True from the methods that start the main transport thread, right - before actually starting the thread. this avoids a race where the main - thread could be started, but the original thread could wake up from the - event.wait(0.1) before the new thread actually set the transport active. - impossible, you say? no machines so slow exist? au contraire, my sad - little linux box faced this problem earlier today. - - - modified files: - paramiko/transport.py - - -2005-02-28 07:14:11 GMT Robey Pointer patch-150 - - Summary: - when combining stderr with stdout on a channel, merge the buffers too - Revision: - secsh--dev--1.0--patch-150 - - when turning on combine-stderr mode on a channel, grab the channel lock and - feed any existing stderr buffer into the normal buffer. this should help - applications (and my unit tests) avoid races between data coming in over - stderr and setting combine-stderr. - - _send_eof is now slightly safer too, although i don't think that really fixed - anything. it just makes me feel better. 
- - modified files: - paramiko/channel.py - - -2005-02-28 07:09:02 GMT Robey Pointer patch-149 - - Summary: - add thread ids to logs - Revision: - secsh--dev--1.0--patch-149 - - add a logging filter that reports the thread-id of the logger, and use - that for all paramiko logging. since thread-local stuff didn't appear - until python 2.4, i hacked up my own little version to assign incrementing - numbers to threads as they log. - - - modified files: - paramiko/channel.py paramiko/sftp.py paramiko/sftp_client.py - paramiko/sftp_server.py paramiko/transport.py paramiko/util.py - - -2005-02-26 21:12:43 GMT Robey Pointer patch-148 - - Summary: - forgot to check in stub_sftp - Revision: - secsh--dev--1.0--patch-148 - - yikes! don't forget to check this in: needed for unit tests. - - new files: - tests/.arch-ids/stub_sftp.py.id tests/stub_sftp.py - - -2005-02-26 21:11:04 GMT Robey Pointer patch-147 - - Summary: - 1.2 (lapras) - Revision: - secsh--dev--1.0--patch-147 - - bump version stuff to 1.2 / lapras. - - modified files: - Makefile README paramiko/__init__.py paramiko/transport.py - setup.py - - -2005-02-15 15:48:47 GMT Robey Pointer patch-146 - - Summary: - raise better exception on empty key - Revision: - secsh--dev--1.0--patch-146 - - raise a clearer exception when trying to create an empty key. - - - modified files: - README paramiko/dsskey.py paramiko/rsakey.py - tests/test_transport.py - - -2005-02-15 15:47:02 GMT Robey Pointer patch-145 - - Summary: - add methods for sending/receiving a channel's exit status - Revision: - secsh--dev--1.0--patch-145 - - track a channel's exit status and provide a method (recv_exit_status) to - block waiting for it to arrive. also provide a convenience method for - servers to send it (send_exit_status). add shutdown_read and shutdown_write. - fix a bug in sending window change requests. - - - modified files: - README paramiko/channel.py paramiko/transport.py - - -2005-02-06 23:32:22 GMT Robey Pointer patch-144 - - Summary: - fix docs - Revision: - secsh--dev--1.0--patch-144 - - clean up some of the docs. - - - modified files: - README paramiko/pkey.py paramiko/sftp_attr.py - - -2005-02-06 23:30:40 GMT Robey Pointer patch-143 - - Summary: - fix an sftp unit test - Revision: - secsh--dev--1.0--patch-143 - - fix one of the sftp unit tests to actually work. - - - modified files: - tests/test_sftp.py - - -2005-02-05 07:45:20 GMT Robey Pointer patch-142 - - Summary: - fix windows sample script's HOME - Revision: - secsh--dev--1.0--patch-142 - - fix the HOME environ var to work on windows too. - - modified files: - demo_windows.py - - -2005-01-25 05:17:55 GMT Robey Pointer patch-141 - - Summary: - misc logging fixes - Revision: - secsh--dev--1.0--patch-141 - - change the level of some log messages so interesting stuff gets logged at - info instead of debug. fix an oops where channels defaulted to being in - ultra debug mode, and make this mode depend on a new Transport method: - "set_hexdump". - - - modified files: - paramiko/auth_transport.py paramiko/channel.py - paramiko/sftp.py paramiko/sftp_client.py - paramiko/sftp_server.py paramiko/transport.py - - -2005-01-17 10:09:09 GMT Robey Pointer patch-140 - - Summary: - more flexible logging - Revision: - secsh--dev--1.0--patch-140 - - some tweaks to make channels etc follow the logger setting of their parent - transport, so that setting the log channel for a paramiko transport will - cause all sub-logging to branch out from that channel. - - also, close all open file handles when the sftp server ends. 
- - - modified files: - paramiko/channel.py paramiko/sftp_attr.py - paramiko/sftp_client.py paramiko/sftp_handle.py - paramiko/sftp_server.py paramiko/transport.py - - -2005-01-16 21:03:15 GMT Robey Pointer patch-139 - - Summary: - make loopback sftp tests the default - Revision: - secsh--dev--1.0--patch-139 - - change the unit tests to default to always running the sftp tests locally, - and make a -R option to force the tests to run against a remote server. - the tests seem to work fine locally, and it helps test out server mode, - even though there's a danger that they could get isolated from reality - and only test that paramiko can talk to itself. - - - modified files: - test.py - - -2005-01-16 20:14:07 GMT Robey Pointer patch-138 - - Summary: - doc fixups - Revision: - secsh--dev--1.0--patch-138 - - little doc fixups that i did obsessively on the train one morning. - - modified files: - paramiko/file.py - - -2005-01-09 05:27:07 GMT Robey Pointer patch-137 - - Summary: - added listdir_attr() - Revision: - secsh--dev--1.0--patch-137 - - add SFTPClient.listdir_attr() to fetch a list of files & their attributes, - instead of just their filenames. artur piwko would find this useful. - - - modified files: - paramiko/sftp_attr.py paramiko/sftp_client.py - - -2004-12-19 19:56:48 GMT Robey Pointer patch-136 - - Summary: - loopback sftp test - Revision: - secsh--dev--1.0--patch-136 - - add ability to turn off more tests, and a secret (for now) -X option to do - the sftp tests via loopback socket. added another symlink sftp test to see - what happens with absolute symlinks. - - - modified files: - test.py tests/test_sftp.py - - -2004-12-19 19:50:00 GMT Robey Pointer patch-135 - - Summary: - more sftp cleanup - Revision: - secsh--dev--1.0--patch-135 - - oops, this should've been part of the last patch. - - - modified files: - paramiko/sftp_si.py - - -2004-12-19 19:43:27 GMT Robey Pointer patch-134 - - Summary: - cleanup & docs in sftp - Revision: - secsh--dev--1.0--patch-134 - - add some more docs to SFTPHandle, and give a default implementation for - close() that's usually right. add a flush() to the default implementation - of write(). document that symlink's args in the sftp protocol are out of - order (the spec is wrong). - - - modified files: - paramiko/sftp_handle.py paramiko/sftp_server.py - - -2004-12-13 07:32:14 GMT Robey Pointer patch-133 - - Summary: - unit test madness - Revision: - secsh--dev--1.0--patch-133 - - add some more testy bits and fix up some other bits. - - - modified files: - tests/test_transport.py - - -2004-12-13 07:31:01 GMT Robey Pointer patch-132 - - Summary: - oops (continued) - Revision: - secsh--dev--1.0--patch-132 - - er, part 2 of that. - - - modified files: - paramiko/server.py - - -2004-12-13 07:29:38 GMT Robey Pointer patch-131 - - Summary: - move check_global_request - Revision: - secsh--dev--1.0--patch-131 - - move check_global_request into the server interface -- i missed it during - the initial move (oops). - - - modified files: - paramiko/transport.py - - -2004-12-13 07:27:39 GMT Robey Pointer patch-130 - - Summary: - small fixups - Revision: - secsh--dev--1.0--patch-130 - - move _wait_for_send_window into the right place in Channel. remove outdated - note from auth_transport. fix download url in setup.py. 
- - - - modified files: - paramiko/auth_transport.py paramiko/channel.py setup.py - - -2004-12-12 09:58:40 GMT Robey Pointer patch-129 - - Summary: - 1.1 (kabuto) - Revision: - secsh--dev--1.0--patch-129 - - edit various files to bump the version to 1.1. - also fix to point to the new url. - - - modified files: - Makefile README paramiko/__init__.py paramiko/transport.py - setup.py - - -2004-12-12 09:38:24 GMT Robey Pointer patch-128 - - Summary: - more unit tests - Revision: - secsh--dev--1.0--patch-128 - - added unit tests for multi-part auth, exec_command, and invoke_shell. - - - modified files: - tests/test_transport.py - - -2004-12-12 09:32:17 GMT Robey Pointer patch-127 - - Summary: - doc fixups - Revision: - secsh--dev--1.0--patch-127 - - fix some typos in sftp_client docs - - - modified files: - paramiko/sftp_client.py - - -2004-12-12 09:25:15 GMT Robey Pointer patch-126 - - Summary: - server support for stderr & exec_command - Revision: - secsh--dev--1.0--patch-126 - - for the server side of my stderr blunder, add send_stderr & sendall_stderr, - and make the sending side of makefile_stderr work correctly. - - also, call check_channel_exec_request on a server object for exec requests - on a channel. - - - modified files: - paramiko/channel.py paramiko/server.py - - -2004-12-12 09:16:03 GMT Robey Pointer patch-125 - - Summary: - add client-side multi-part auth support - Revision: - secsh--dev--1.0--patch-125 - - added support for multi-part authentication (even though nobody supports it - that i've seen). on a successful "partial" auth, the auth_* method will - return a list of acceptable means to continue authenticating. - - - modified files: - paramiko/auth_transport.py paramiko/ssh_exception.py - - -2004-12-11 03:44:33 GMT Robey Pointer patch-124 - - Summary: - docs fixup - Revision: - secsh--dev--1.0--patch-124 - - fix a comment typo, and add @since designators to a couple of new methods. - - - modified files: - paramiko/channel.py paramiko/sftp_server.py - - -2004-12-11 03:43:18 GMT Robey Pointer patch-123 - - Summary: - clean up authentication - Revision: - secsh--dev--1.0--patch-123 - - add new exception "BadAuthenticationType", which is raised when auth fails - because your auth type (password or public-key) isn't valid on the server. - - used this as an excuse to clean up auth_password and auth_publickey so their - 'event' arg is optional, and if missing, they block until auth is finished, - raising an exception on error. - - also, don't close the session on failed auth -- the server may let you try - again. - - added some test cases for failed auth. - - - modified files: - paramiko/__init__.py paramiko/auth_transport.py - paramiko/ssh_exception.py paramiko/transport.py - tests/test_transport.py - - -2004-12-10 08:30:44 GMT Robey Pointer patch-122 - - Summary: - symlink, readlink - Revision: - secsh--dev--1.0--patch-122 - - add support for symlink command, and finish support for readlink. (i guess - i started readlink a while ago but forgot to add the right method to the - SFTPServerInterface class.) - - - modified files: - paramiko/sftp_server.py paramiko/sftp_si.py tests/test_sftp.py - - -2004-12-10 08:27:43 GMT Robey Pointer patch-121 - - Summary: - other part of that last patch - Revision: - secsh--dev--1.0--patch-121 - - oops, forgot this part. 
- - modified files: - paramiko/transport.py - - -2004-12-10 08:25:28 GMT Robey Pointer patch-120 - - Summary: - add stderr support methods - Revision: - secsh--dev--1.0--patch-120 - - big embarrassment: i didn't read the ssh2 docs close enough, and all this - time paramiko wasn't handling "extended_data" packets, which contain stderr - output. - - so now, several new functions: recv_stderr_ready() and recv_stderr() to - mirror recv_ready() and recv(), and set_combined_stderr() to force stderr - to be combined into stdout. also, makefile_stderr() to create a fake file - object to represent stderr. - - - modified files: - paramiko/channel.py - - -2004-12-10 07:55:33 GMT Robey Pointer patch-119 - - Summary: - reformat README - Revision: - secsh--dev--1.0--patch-119 - - reformatted the README to a slightly smaller margin, just because. - - - modified files: - README - - -2004-12-09 04:15:12 GMT Robey Pointer patch-118 - - Summary: - fix SFTPFile gettimeout/settimeout - Revision: - secsh--dev--1.0--patch-118 - - i don't think the gettimeout/settimeout calls on SFTPFile ever worked. - also, simplify the implementation of _get_size() since it's nearly - identical to stat(). - - - modified files: - paramiko/sftp_file.py - - -2004-12-09 02:42:36 GMT Robey Pointer patch-117 - - Summary: - readme comments - Revision: - secsh--dev--1.0--patch-117 - - add another fixme to the readme - - modified files: - README - - -2004-11-26 22:07:31 GMT Robey Pointer patch-116 - - Summary: - doc fixups - Revision: - secsh--dev--1.0--patch-116 - - explain "recv_ready" better, and add debug descriptions for the kex codes. - - - modified files: - README paramiko/channel.py paramiko/common.py - - -2004-11-25 19:39:34 GMT Robey Pointer patch-115 - - Summary: - fix CONNECTION_FAILED_CODE - Revision: - secsh--dev--1.0--patch-115 - - oops, fix typo in channel request failed. - - modified files: - paramiko/transport.py - - -2004-11-22 07:40:39 GMT Robey Pointer patch-114 - - Summary: - fix typo in channel - Revision: - secsh--dev--1.0--patch-114 - - fix typo that alain found: pipd_wfd -> pipe_wfd. - - - modified files: - paramiko/channel.py - - -2004-11-22 07:27:21 GMT Robey Pointer patch-113 - - Summary: - sftp server support! - Revision: - secsh--dev--1.0--patch-113 - - finally check in sftp_handle (file handle abstraction), sftp_si (server - interface), and sftp_server (server implementation) -- all of which make - a roughly 90% implementation of server-side sftp. - - - - new files: - paramiko/.arch-ids/sftp_handle.py.id - paramiko/.arch-ids/sftp_server.py.id - paramiko/.arch-ids/sftp_si.py.id paramiko/sftp_handle.py - paramiko/sftp_server.py paramiko/sftp_si.py - - modified files: - README demo_windows.py paramiko/__init__.py - - -2004-11-22 07:07:08 GMT Robey Pointer patch-112 - - Summary: - add finish_subsystem() - Revision: - secsh--dev--1.0--patch-112 - - when a SubsystemHandler is being decomissioned (the client has closed the - channel or transport, or the socket went away), make a callback to let the - handler do any shutdown it needs to. - - - modified files: - paramiko/server.py - - -2004-11-22 07:04:31 GMT Robey Pointer patch-111 - - Summary: - fix extremely unlikely channel counter wrapping - Revision: - secsh--dev--1.0--patch-111 - - Transport's channel counter can overflow after 4 billion some channels are - created. make it wrap back around after 16 million instead. also allow the - logging channel to be set manually. fix some comments elsewhere. 
- - - modified files: - paramiko/channel.py paramiko/primes.py paramiko/transport.py - - -2004-11-22 07:01:43 GMT Robey Pointer patch-110 - - Summary: - fix Transport.get_username() to work in server mode too - Revision: - secsh--dev--1.0--patch-110 - - whenever i split the 'username' field into username and auth_username, - i guess that made get_username() stop working for server mode (because the - username was stored in a different field). this should fix it. - - modified files: - paramiko/auth_transport.py - - -2004-11-07 03:10:53 GMT Robey Pointer patch-109 - - Summary: - v1.0 (jigglypuff) - Revision: - secsh--dev--1.0--patch-109 - - bump all the version numbers up to 1.0 (jigglypuff). - - modified files: - Makefile README paramiko/__init__.py paramiko/transport.py - setup.py - - -2004-11-07 02:51:42 GMT Robey Pointer patch-108 - - Summary: - add filename to SFTPAttributes - Revision: - secsh--dev--1.0--patch-108 - - add filename to the attributes stored in an SFTPAttributes object. - - modified files: - paramiko/sftp_attr.py - - -2004-11-07 02:31:48 GMT Robey Pointer patch-107 - - Summary: - fix kex_gex - Revision: - secsh--dev--1.0--patch-107 - - fix kex_gex (group-exchange key exchange) to, *cough*, work again, and also - layout kex_group1 a little more sanely. - - modified files: - paramiko/kex_gex.py paramiko/kex_group1.py - - -2004-11-07 02:29:54 GMT Robey Pointer patch-106 - - Summary: - fix chmod +x on demo_windows.py - Revision: - secsh--dev--1.0--patch-106 - - forgot to make demo_windows +x - - -2004-11-07 02:29:20 GMT Robey Pointer patch-105 - - Summary: - move ChangeLog - Revision: - secsh--dev--1.0--patch-105 - - move ChangeLog out of the way because tla can autogenerate any useful - ChangeLog. - - - renamed files: - .arch-ids/ChangeLog.id - ==> .arch-ids/ChangeLog-old.id - ChangeLog - ==> ChangeLog-old - - -2004-11-07 02:28:33 GMT Robey Pointer patch-104 - - Summary: - fix location of SFTPError - Revision: - secsh--dev--1.0--patch-104 - - fix location of SFTPError. - - modified files: - paramiko/__init__.py paramiko/sftp_client.py - - -2004-11-07 02:17:18 GMT Robey Pointer patch-103 - - Summary: - rename sftp constants - Revision: - secsh--dev--1.0--patch-103 - - replace oddly named sftp constants (FX_OK for example) with names that make - a bit more sense when sober (SFTP_OK). - - modified files: - paramiko/__init__.py paramiko/sftp.py paramiko/sftp_client.py - - -2004-11-07 02:08:11 GMT Robey Pointer patch-102 - - Summary: - add key exchange tests + 1 more sftp test - Revision: - secsh--dev--1.0--patch-102 - - add test suite for key-exchange protocols, since i apparently broke the - "gex" protocol recently and never noticed. also add an sftp unit test for - mkdir/rmdir. - - new files: - tests/.arch-ids/test_kex.py.id tests/test_kex.py - - modified files: - test.py tests/test_sftp.py - - -2004-11-07 02:00:50 GMT Robey Pointer patch-101 - - Summary: - remove old demo keys - Revision: - secsh--dev--1.0--patch-101 - - the keys are in tests/ now. - - removed files: - .arch-ids/demo_dss_key.id .arch-ids/demo_rsa_key.id - demo_dss_key demo_rsa_key - - -2004-11-06 20:32:08 GMT Robey Pointer patch-100 - - Summary: - don't forget demo_windows.py - Revision: - secsh--dev--1.0--patch-100 - - update MANIFEST.in to include demo_windows.py and not include the demo - keys (they're in tests/ now). clean up the README to explain the demo - scripts better now, since there are so many of them. then fix up the - demo scripts to look in tests/ for the keys. 
- - demo_windows.py doesn't need to call get_pty() (in fact, i think that's - blowing openssh's mind) and was executing the wrong command. - - - modified files: - MANIFEST.in README demo_server.py demo_simple.py - demo_windows.py - - -2004-11-01 07:07:48 GMT Robey Pointer patch-99 - - Summary: - use getpass - Revision: - secsh--dev--1.0--patch-99 - - convert raw_input to getpass as suggested many weeks ago. - - modified files: - forward.py - - -2004-11-01 03:54:01 GMT Robey Pointer patch-98 - - Summary: - don't unlink a Channel until the server closes it too - Revision: - secsh--dev--1.0--patch-98 - - when close()'ing a Channel, don't immediately unlink it from the Transport. - instead, wait for the server to send a close message. - - this should fix a bug where doing close() on an EOF'd channel would cause - the entire transport to be killed, because the server would send an - 'exit-status' and 'close' message for a channel that we no longer had a - record of. - - - modified files: - paramiko/channel.py - - -2004-11-01 03:43:28 GMT Robey Pointer patch-97 - - Summary: - better debugging, improve subsytem handler - Revision: - secsh--dev--1.0--patch-97 - - add a list of ssh packet names for debugging. improve the server-mode - subsystem handler so it can take extra parameters (list or keyword) and - pass them to the subsystem constructor. remove a misleading comment - about rekeying (which was already implemented). - - - modified files: - paramiko/common.py paramiko/server.py paramiko/transport.py - - -2004-11-01 03:37:42 GMT Robey Pointer patch-96 - - Summary: - remove key.valid check - Revision: - secsh--dev--1.0--patch-96 - - oops! 'key.valid' no longer works -- catch the SSHException instead, and log - it. - - - modified files: - paramiko/auth_transport.py - - -2004-10-23 07:36:23 GMT Robey Pointer patch-95 - - Summary: - ivysaur 0.9 - Revision: - secsh--dev--1.0--patch-95 - - update ivysaur release date, and add the list of changes to the README - file. - - - modified files: - Makefile README paramiko/__init__.py - - -2004-10-20 16:52:51 GMT Robey Pointer patch-94 - - Summary: - start testing Transport - Revision: - secsh--dev--1.0--patch-94 - - the beginnings of tests for Transport. only the bare minimum is there right - now. - - also started doc'ing things up to ivysaur. - - new files: - .arch-ids/demo_windows.py.id demo_windows.py - tests/.arch-ids/loop.py.id - tests/.arch-ids/test_transport.py.id tests/loop.py - tests/test_transport.py - - modified files: - Makefile README paramiko/__init__.py setup.py test.py - - -2004-10-18 04:54:27 GMT Robey Pointer patch-93 - - Summary: - switch Transport.connect() to using a Pkey object for the host key - Revision: - secsh--dev--1.0--patch-93 - - i suddenly realized that passing "hostkeytype" and "hostkey" as strings to - Transport.connect() was pretty silly since i went to all the effort of making - a class specifically for holding keys. so Transport.connect() now just takes - host-key argument: "hostkey" as a PKey object. - - updated the demos to use PKey objects when reading the host key file, and to - use the new "hostkey" argument. - - - modified files: - demo.py demo_simple.py paramiko/pkey.py paramiko/transport.py - - -2004-09-25 22:07:59 GMT Robey Pointer patch-92 - - Summary: - add rsa/dss key object unit tests - Revision: - secsh--dev--1.0--patch-92 - - add tests for rsa/dss key objects -- yay! 
- - - new files: - tests/.arch-ids/test_dss.key.id - tests/.arch-ids/test_pkey.py.id - tests/.arch-ids/test_rsa.key.id tests/test_dss.key - tests/test_pkey.py tests/test_rsa.key - - -2004-09-25 22:03:48 GMT Robey Pointer patch-91 - - Summary: - fix test.py to use options instead of env vars, sftp tests default off - Revision: - secsh--dev--1.0--patch-91 - - fix up the test framework so that the sftp unit tests aren't always run (you - have to ask for them explicitly) and they take their configuration from - command-line options. they still require a remote server. - - modified files: - test.py tests/test_sftp.py - - -2004-09-25 21:58:11 GMT Robey Pointer patch-90 - - Summary: - fix __init__ - Revision: - secsh--dev--1.0--patch-90 - - fix __init__ to export BufferedFile and randpool, and to catch up with the - changes from a week or 2 ago where sftp_attr & friends were split off. - - modified files: - paramiko/__init__.py - - -2004-09-25 21:47:19 GMT Robey Pointer patch-89 - - Summary: - fix some Transport docs - Revision: - secsh--dev--1.0--patch-89 - - document that Transport also would like close() and settimeout() to exist - on the socket-like object passed to the constructor. - - modified files: - paramiko/transport.py - - -2004-09-25 21:32:53 GMT Robey Pointer patch-88 - - Summary: - add Message.rewind() - Revision: - secsh--dev--1.0--patch-88 - - add rewind() method to Message, which just resets the pointer so you can - start reading from the beginning again. this is useful for some tests. - - modified files: - paramiko/message.py tests/test_message.py - - -2004-09-25 21:28:23 GMT Robey Pointer patch-87 - - Summary: - clean up pkey interface - Revision: - secsh--dev--1.0--patch-87 - - change the pkey interface so that it's no longer possible to have a pkey - that doesn't represent a valid key. (ie: no more "blank" key objects.) - also add "get_bits" and "can_sign" methods to determine the key bit length - and whether it can sign things (contains the "private parts") respectively. - - modified files: - paramiko/dsskey.py paramiko/pkey.py paramiko/rsakey.py - - -2004-09-11 21:01:32 GMT Robey Pointer patch-86 - - Summary: - unit tests for Message - Revision: - secsh--dev--1.0--patch-86 - - spanking new unit tests for Message. i'm trying to fix the embarrassment - of having so little of paramiko testable. next up is Transport! - - new files: - tests/.arch-ids/test_message.py.id tests/test_message.py - - -2004-09-11 20:56:01 GMT Robey Pointer patch-85 - - Summary: - move SFTPFile and SFTPAttributes into their own files - Revision: - secsh--dev--1.0--patch-85 - - move SFTPFile and SFTPAttributes into their own files. - - new files: - paramiko/.arch-ids/sftp_attr.py.id - paramiko/.arch-ids/sftp_file.py.id paramiko/sftp_attr.py - paramiko/sftp_file.py - - modified files: - paramiko/sftp.py paramiko/sftp_client.py - - -2004-09-11 20:50:39 GMT Robey Pointer patch-84 - - Summary: - add sftp.normalize - Revision: - secsh--dev--1.0--patch-84 - - kevin c. dorff pointed out that it would be nice to expose a way to - determine the server's "current working directory", so this new method - (normalize) directly maps to REALPATH. - - modified files: - paramiko/sftp_client.py - - -2004-09-11 20:43:09 GMT Robey Pointer patch-83 - - Summary: - tweak Message.add() in the key exchanges - Revision: - secsh--dev--1.0--patch-83 - - use the new Message.add() behavior to make a little code here much easier - to read. 
- - modified files: - paramiko/kex_gex.py paramiko/kex_group1.py - - -2004-09-11 20:40:08 GMT Robey Pointer patch-82 - - Summary: - doc fixes - Revision: - secsh--dev--1.0--patch-82 - - fix "string" -> "str" in types when documenting BufferedFile. - - modified files: - paramiko/file.py - - -2004-09-11 20:37:59 GMT Robey Pointer patch-81 - - Summary: - more unit tests - Revision: - secsh--dev--1.0--patch-81 - - add test for BufferedFile.read(-1) and sftp.normalize(). - - modified files: - tests/test_file.py tests/test_sftp.py - - -2004-09-11 20:36:49 GMT Robey Pointer patch-80 - - Summary: - move SubsystemHandler to server.py - Revision: - secsh--dev--1.0--patch-80 - - move SubsystemHandler into server.py where it makes more sense (it's part of - the server interface). - - also fix up paramiko's "version string" used in ssh2 negotiation to stop - saying "pyssh" and start saying "paramiko". :) - - modified files: - paramiko/server.py paramiko/transport.py - - -2004-09-11 20:35:19 GMT Robey Pointer patch-79 - - Summary: - Message.add() can take many args - Revision: - secsh--dev--1.0--patch-79 - - a bit of cleanup to Message: add() can now take any number of params, and - will add them all in order (using type guessing). - - modified files: - paramiko/message.py - - -2004-09-09 01:36:45 GMT Robey Pointer patch-78 - - Summary: - fix rbuffer -> _rbuffer in 3 places i missed - Revision: - secsh--dev--1.0--patch-78 - - fix 3 places where "rbuffer" hadn't been converted to "_rbuffer". thanks to - kevin c. dorff for the bug report. - - modified files: - paramiko/file.py - - -2004-09-07 06:56:49 GMT Robey Pointer patch-77 - - Summary: - docs for SubsystemHandler - Revision: - secsh--dev--1.0--patch-77 - - add documentation to constructor for SubsystemHandler. - - modified files: - paramiko/transport.py - - -2004-09-07 06:54:31 GMT Robey Pointer patch-76 - - Summary: - add sftp_client.py - Revision: - secsh--dev--1.0--patch-76 - - i retardedly forgot to import this file a few days ago: it's the split-out - client mode for sftp. it now also has some changes to adapt it to the - improved SFTPAttributes object API. - - new files: - paramiko/.arch-ids/sftp_client.py.id paramiko/sftp_client.py - - -2004-09-07 06:51:03 GMT Robey Pointer patch-75 - - Summary: - clean up SFTPAttributes - Revision: - secsh--dev--1.0--patch-75 - - add english descriptions to the FX_* error codes of sftp. clean up (and - document) SFTPAttributes since it's exported now, and make it simple to - generate one from a python os.stat object. make "_pythonize" the default -- - that is, just use the same field names as python does for os.stat. (i'm not - sure why i didn't do it that way in the first place; probably ignorance.) - also add str() method that converts the SFTPAttributes into a string suitable - for use in ls (used in an obscure way in sftp servers). - - modified files: - paramiko/sftp.py - - -2004-09-07 06:45:53 GMT Robey Pointer patch-74 - - Summary: - note pycrypto 2.0 in README - Revision: - secsh--dev--1.0--patch-74 - - update the README to note that pycrypto 2.0 works (i just tried it). also - fix the name from pyCrypt back to pycrypto -- that project is having trouble - making up its mind about naming. :) - - modified files: - README - - -2004-09-05 07:44:03 GMT Robey Pointer patch-73 - - Summary: - split sftp into sftp, sftp_client; renamed SFTP -> SFTPClient - Revision: - secsh--dev--1.0--patch-73 - - add sftp_client file, and split out the common code (sftp) from stuff specific - to client mode (sftp_client). 
renamed SFTP class to SFTPClient, but left an - alias so old code will still work. - - renamed a bunch of sftp constants now that they're better hidden from epydoc. - - modified files: - README paramiko/__init__.py paramiko/sftp.py - - -2004-09-05 07:41:45 GMT Robey Pointer patch-72 - - Summary: - some framework for adding subsystem handlers in server mode - Revision: - secsh--dev--1.0--patch-72 - - you can now register a subsystem with a Transport by passing in the name - (like "sftp") and a class (like a hypothetical SFTPServer). the default - ServerInterface.check_channel_request_subsystem now checks this table in - Transport, and if it finds a match, it creates a new thread for the handler - and calls into it. a new class SubsystemHandler is added for this purpose - (to be subclassed). - - modified files: - paramiko/server.py paramiko/transport.py - - -2004-09-05 07:37:40 GMT Robey Pointer patch-71 - - Summary: - remove redundant 'auth_complete' member - Revision: - secsh--dev--1.0--patch-71 - - remove the redundant 'auth_complete' field and just use 'authenticated' for - both client and server mode. this makes the repr() string look correct in - server mode instead of always claiming that the transport is un-auth'd. - - modified files: - paramiko/auth_transport.py - - -2004-09-03 22:39:20 GMT Robey Pointer patch-70 - - Summary: - clean up server interface; no longer need to subclass Channel - Revision: - secsh--dev--1.0--patch-70 - - - export AUTH_*, OPEN_FAILED_*, and the new OPEN_SUCCEEDED into the paramiko - namespace instead of making people dig into paramiko.Transport.AUTH_* etc. - - move all of the check_* methods from Channel to ServerInterface so apps - don't need to subclass Channel anymore just to run an ssh server - - ServerInterface.check_channel_request() returns an error code now, not a - new Channel object - - fix demo_server.py to follow all these changes - - fix a bunch of places where i used "string" in docstrings but meant "str" - - added Channel.get_id() - - modified files: - README demo_server.py paramiko/__init__.py - paramiko/auth_transport.py paramiko/channel.py - paramiko/common.py paramiko/server.py paramiko/sftp.py - paramiko/transport.py - - -2004-08-31 02:44:56 GMT Robey Pointer patch-69 - - Summary: - clean up SecurityOptions - Revision: - secsh--dev--1.0--patch-69 - - the preferences are now tuples in Transport, and passed as tuples out of - SecurityOptions, so that the options can't be modified without setting them - back to the options field again. the algorithm lists in Transport are used - to validate the fields. - - modified files: - paramiko/transport.py - - -2004-08-30 20:22:10 GMT Robey Pointer patch-68 - - Summary: - added Transport.get_security_options() - Revision: - secsh--dev--1.0--patch-68 - - just something i wanted to play with: - added Transport.get_security_options() which returns a SecurityOptions object. - this object is a kind of proxy for the 4 "preferred_*" fields in Transport, - and lets me avoid exposing those fields directly in case i change my mind - later about how they should be stored. - - added some docs to Channel explaining that the request methods now return - True/False, and fixed up docs in a few other places. 
- - modified files: - paramiko/__init__.py paramiko/channel.py paramiko/server.py - paramiko/sftp.py paramiko/transport.py - - -2004-08-28 04:21:12 GMT Robey Pointer patch-67 - - Summary: - replay patch 63 (missing channel changes) - Revision: - secsh--dev--1.0--patch-67 - - i'm still getting the hang of tla/arch, obviously. - - replay patch 63, which was meant to be part of the later mega-patch, but - apparently when i reversed it, i lost it entirely. - - modified files: - paramiko/channel.py - - -2004-08-27 00:57:40 GMT Robey Pointer patch-66 - - Summary: - new ServerInterface class, outbound rekey works, etc. - Revision: - secsh--dev--1.0--patch-66 - - a bunch of changes that i'm too lazy to split out into individual patches: - * all the server overrides from transport.py have been moved into a separate - class ServerInterface, so server code doesn't have to subclass the whole - paramiko library - * updated demo_server to subclass ServerInterface - * when re-keying during a session, block other messages until the new keys - are activated (openssh doensn't like any other traffic during a rekey) - * re-key when outbound limits are tripped too (was only counting inbound - traffic) - * don't log scary things on EOF - - - new files: - paramiko/.arch-ids/server.py.id paramiko/server.py - - modified files: - README demo_server.py paramiko/__init__.py - paramiko/auth_transport.py paramiko/transport.py - - -2004-08-27 00:28:33 GMT Robey Pointer patch-65 - - Summary: - add settimeout/gettimeout/setblocking, some bugfixes. - Revision: - secsh--dev--1.0--patch-65 - - hide the command and response codes in sftp so they aren't exported. - add settimeout/gettimeout/setblocking that just wrap calls to the underlying - socket or channel. fix _read_all to not catch timeout exceptions. - - - modified files: - paramiko/sftp.py - - -2004-08-27 00:26:35 GMT Robey Pointer patch-64 - - Summary: - reverse messed-up patch - Revision: - secsh--dev--1.0--patch-64 - - Patches applied: - - * robey@lag.net--2003-public/secsh--dev--1.0--base-0 - initial import - - * robey@lag.net--2003-public/secsh--dev--1.0--patch-1 - no changes - - - modified files: - paramiko/channel.py {arch}/=tagging-method - - -2004-08-27 00:06:42 GMT Robey Pointer patch-63 - - Summary: - add settimeout/gettimeout/setblocking, some bugfixes. - Revision: - secsh--dev--1.0--patch-63 - - hide the command and response codes in sftp so they aren't exported. - add settimeout/gettimeout/setblocking that just wrap calls to the underlying - socket or channel. fix _read_all to not catch timeout exceptions. - - modified files: - paramiko/channel.py - - -2004-06-27 20:14:15 GMT Robey Pointer patch-62 - - Summary: - version -> horsea - Revision: - secsh--dev--1.0--patch-62 - - up version to horsea. - - modified files: - Makefile README paramiko/__init__.py setup.py - {arch}/secsh/secsh--dev/secsh--dev--1.0/robey@lag.net--2003-public/patch-log/patch-1 - - -2004-06-10 18:12:00 GMT Robey Pointer patch-61 - - Summary: - no more Foobar - Revision: - secsh--dev--1.0--patch-61 - - fix "Foobar" to be "Paramiko" in the one place i missed it in all the gpl - headers. sigh. 
:) - - modified files: - paramiko/__init__.py paramiko/auth_transport.py - paramiko/ber.py paramiko/common.py paramiko/dsskey.py - paramiko/kex_gex.py paramiko/kex_group1.py - paramiko/logging22.py paramiko/message.py paramiko/pkey.py - paramiko/primes.py paramiko/rsakey.py - paramiko/ssh_exception.py paramiko/util.py test.py - - -2004-06-10 18:08:50 GMT Robey Pointer patch-60 - - Summary: - limit read/write requests to 32KB, advertise 32KB max packet size - Revision: - secsh--dev--1.0--patch-60 - - one of the unit tests was failing because the openssh sftp server was dropping - the connection without any error. turns out they have a maximum allowed write - size (possibly around 64KB). the sftp rfcs have a small hint that some servers - may drop read/write requests of greater than 32KB. - - so, all reads are limited to 32KB, and all writes > 32KB are now chopped up - and sent in 32KB chunks. this seems to keep openssh happy. - - also, we now advertise 32KB max packet size instead of 8KB (the speed - improves a lot), and log when we read/write a packet. and sftp files are - flushed on seek. - - modified files: - paramiko/sftp.py paramiko/transport.py - - -2004-06-10 18:02:13 GMT Robey Pointer patch-59 - - Summary: - speed up parts of BufferedFile - Revision: - secsh--dev--1.0--patch-59 - - BufferedFile uses cStringIO for the write buffer now (i don't actually notice - any speed difference so this might revert later) and the default buffer size - has been upped from 1KB to 8KB. - - when scanning for linefeeds (when writing to a line-buffered file), only scan - the newly-written bytes, since we know all the previously buffered data is - linefeed-free. this was the #1 slowdown on the 1MB-file unit test. - - also, limit the buffering on line-buffered files to whatever the default - buffer size is. there's no reason to buffer 1MB waiting for a linefeed. - - modified files: - paramiko/file.py - - -2004-06-10 17:55:17 GMT Robey Pointer patch-58 - - Summary: - some Channel fixes for max packet size & blocking on zero window - Revision: - secsh--dev--1.0--patch-58 - - some clean-ups and fixes to channels: - * when send() is blocked on a zero-width window, check that the channel is - still open. this was causing some lockups. - * set a lower bound to the "maximum packet size" we accept from the remote - host. if they tell us anything less than 1KB, assume they meant 1KB. (it's - not reasonable to fragment below that.) - * leave a little padding instead of cutting right up to the maximum packet - size: some space will be taken up by protocol overhead. - * turn off some of the debug log lines unless "ultra_debug" is on (nobody - cares about the feed info) - - - modified files: - paramiko/channel.py - - -2004-06-10 17:35:30 GMT Robey Pointer patch-57 - - Summary: - more unit tests - Revision: - secsh--dev--1.0--patch-57 - - add a unit test for sending a large (1MB) file with line buffering but no - linefeeds (this triggered several bugs and inefficiencies), and another test - to verify that the write buffer is flushed on seek. - - modified files: - tests/test_file.py tests/test_sftp.py - - -2004-05-31 23:48:10 GMT Robey Pointer patch-56 - - Summary: - add forward.py demo script; bump to gyarados - Revision: - secsh--dev--1.0--patch-56 - - add a demo script to show how to do local port forwarding. - - add gyarados to all the docs and bump the version number everywhere. 
- - new files: - .arch-ids/forward.py.id forward.py - - modified files: - MANIFEST.in Makefile README paramiko/__init__.py setup.py - - -2004-05-29 18:58:11 GMT Robey Pointer patch-55 - - Summary: - add an sftp unit test for making 100 files - Revision: - secsh--dev--1.0--patch-55 - - create 100 files on the remote server, set their mode with chmod, then verify - that they're all there and contain the right data. valeriy is reporting that - sometimes he's getting stuck after 20 and though i'm not seeing it, i want to - add a test to try to pin it down. - - modified files: - tests/test_sftp.py - - -2004-05-29 18:56:10 GMT Robey Pointer patch-54 - - Summary: - add direct-tcpip ability to open_channel - Revision: - secsh--dev--1.0--patch-54 - - open_channel can now be given a dest_addr and src_addr, which are filled in - if the channel type is "forwarded-tcpip" or "direct-tcpip". these channel - types are used in remote & local port forwarding, respectively. i've only - tested "direct-tcpip" but i think if one works, they both should work. - - also fixed a bug in connect where it was still assuming the old meaning for - get_remove_server_key() (oops!) and changed the sense of a send() failure - from <= 0 to < 0 since it may be possible for send() to return 0 and it not - be an EOF error. - - modified files: - paramiko/transport.py - - -2004-05-29 18:48:23 GMT Robey Pointer patch-53 - - Summary: - add note about utf8 encodings - Revision: - secsh--dev--1.0--patch-53 - - add info to the README about what to do if python complains about missing - encodings. veleriy pogrebitskiy ran into this and had advice. - - modified files: - README - - -2004-05-17 07:41:50 GMT Robey Pointer patch-52 - - Summary: - fix deadlock in closing a channel - Revision: - secsh--dev--1.0--patch-52 - - closing a channel would enter an odd codepath where the lock was grabbed, - some stuff was done, then another function was called where the lock was - grabbed again. unfortunately python locks aren't monitors so this would - deadlock. instead, make the smaller function lock-free with an explicit - notice that you must be holding the lock before calling. - - modified files: - paramiko/channel.py - - -2004-05-17 00:43:43 GMT Robey Pointer patch-51 - - Summary: - fix utf8, raise packet size, log exceptions, be more lax with sfp servers - Revision: - secsh--dev--1.0--patch-51 - - explicitly import utf8 encodings for "freezing" (and also because not all - platforms come with utf8, apparently). raise the max acceptable packet size - to 8kB, cuz 2kB was too low. log exceptions at error level instead of debug - level. and don't reject older sftp servers. - - modified files: - paramiko/auth_transport.py paramiko/sftp.py - paramiko/transport.py - - -2004-04-23 22:55:16 GMT Robey Pointer patch-50 - - Summary: - fearow date and last-minute fixes - Revision: - secsh--dev--1.0--patch-50 - - update release date of fearow to 23apr. fix channel._set_closed() to grab - the lock before notifying the in/out buffers that the channel is closed. - try roger's trick for finding the home folder on windows. 
- - modified files: - Makefile README paramiko/__init__.py paramiko/channel.py - paramiko/common.py - - -2004-04-08 06:31:08 GMT Robey Pointer patch-49 - - Summary: - fix doc typos - Revision: - secsh--dev--1.0--patch-49 - - - modified files: - paramiko/dsskey.py paramiko/rsakey.py paramiko/transport.py - - -2004-04-08 05:48:16 GMT Robey Pointer patch-48 - - Summary: - set version number to fearow - Revision: - secsh--dev--1.0--patch-48 - - set version number to fearow. - - modified files: - Makefile README paramiko/__init__.py setup.py - - -2004-04-08 05:12:20 GMT Robey Pointer patch-47 - - Summary: - add socket.timeout for py22 - Revision: - secsh--dev--1.0--patch-47 - - oops, forgot this vital part of the py22 patches. roger binns sent me a - code patch that included this snip. - - modified files: - paramiko/common.py - - -2004-04-07 16:05:48 GMT Robey Pointer patch-46 - - Summary: - README update notes - Revision: - secsh--dev--1.0--patch-46 - - added notes on what's new, what to watch out for in py22. added a "since: - fearow" to all the relevant API calls that are new. - - modified files: - README paramiko/auth_transport.py paramiko/dsskey.py - paramiko/pkey.py paramiko/rsakey.py paramiko/transport.py - - -2004-04-07 15:52:07 GMT Robey Pointer patch-45 - - Summary: - add set_keepalive() - Revision: - secsh--dev--1.0--patch-45 - - add set_keepalive() to set an automatic keepalive mechanism. (while waiting - for a packet on a connection, we periodically check if it's time to send a - keepalive packet.) - - modified files: - paramiko/transport.py - - -2004-04-07 06:07:29 GMT Robey Pointer patch-44 - - Summary: - add get_username() method for remembering who you auth'd as - Revision: - secsh--dev--1.0--patch-44 - - add get_username() method for remembering who you auth'd as. also, fix these - bugs: - * "continue" auth response counted as a failure (in server mode). - * try to import 'logging' in py22 before falling back to the fake logger, - in case they have a backported version of 'logger' - * raise the right exception when told to read a private key from a file that - isn't a private key file - * tell channels to close when the transport dies - - modified files: - paramiko/auth_transport.py paramiko/channel.py - paramiko/common.py paramiko/pkey.py paramiko/transport.py - - -2004-04-06 22:03:21 GMT Robey Pointer patch-43 - - Summary: - fix encrypted private key files - Revision: - secsh--dev--1.0--patch-43 - - the random byte padding on private key files' BER data was confusing openssh, - so switch to null-byte padding, which is slightly less secure but works with - crappy old openssh. also, enforce the mode when writing the private key - file. we really really want it to be 0600. (python seems to ignore the - mode normally.) - - modified files: - paramiko/pkey.py - - -2004-04-06 08:16:02 GMT Robey Pointer patch-42 - - Summary: - support py22, more or less - Revision: - secsh--dev--1.0--patch-42 - - add roger binns' patches for supporting python 2.2. i hedged a bit on the - logging stuff and just added some trickery to let logging be stubbed out for - python 2.2. this changed a lot of import statements but i managed to avoid - hacking at any of the existing logging. - - socket timeouts are required for the threads to notice when they've been - deactivated. worked around it by using the 'select' module on py22. - - also fixed the sftp unit tests to cope with a password-protected private key. 
- - new files: - paramiko/.arch-ids/logging22.py.id paramiko/logging22.py - - modified files: - README demo.py demo_server.py demo_simple.py - paramiko/__init__.py paramiko/auth_transport.py - paramiko/channel.py paramiko/common.py paramiko/kex_gex.py - paramiko/kex_group1.py paramiko/message.py paramiko/sftp.py - paramiko/transport.py paramiko/util.py tests/test_sftp.py - - -2004-04-05 22:32:03 GMT Robey Pointer patch-41 - - Summary: - make get_remote_server_key() return a PKey object - Revision: - secsh--dev--1.0--patch-41 - - a good suggestion from roger binns: make get_remote_server_key() just return - a pkey object instead of a tuple of strings. all the strings can be extracted - from the pkey object, as well as other potentially useful things. - - modified files: - demo.py paramiko/transport.py - - -2004-04-05 19:36:40 GMT Robey Pointer patch-40 - - Summary: - add dss key generation too, and fix some bugs - Revision: - secsh--dev--1.0--patch-40 - - added the ability to generate dss keys and write private dss key files, - similar to rsa. in the process, fixed a couple of bugs with ber encoding - and writing password-encrypted key files. the key has to be padded to the - iblock size of the cipher -- it's very difficult to determine how the others - do this, so i just add random bytes to the end. - - fixed the simple demo to use Transport's (host, port) constructor for - simplicity, and fixed a bug where the standard demo's DSS login wouldn't - work. - - also, move the common logfile setup crap into util so all the demos can just - call that one. - - modified files: - demo.py demo_simple.py paramiko/ber.py paramiko/dsskey.py - paramiko/pkey.py paramiko/rsakey.py paramiko/util.py - - -2004-04-05 10:37:18 GMT Robey Pointer patch-39 - - Summary: - add global request mechanism - Revision: - secsh--dev--1.0--patch-39 - - add transport.global_request() to make a global-style request (usually an - extension to the protocol -- like keepalives) and handle requests from the - remote host. incoming requests are now handled and responded to correctly, - which should make openssh-style keepalives work. (before, we would silently - ignore them, which was wrong.) - - modified files: - paramiko/common.py paramiko/message.py paramiko/transport.py - - -2004-04-05 10:24:33 GMT Robey Pointer patch-38 - - Summary: - add common.py file - Revision: - secsh--dev--1.0--patch-38 - - missing from previous change because tla doesn't like to add files in some - situations. (frown) - - - new files: - paramiko/.arch-ids/common.py.id paramiko/common.py - - -2004-04-05 10:16:31 GMT Robey Pointer patch-37 - - Summary: - can now generate rsa keys (not dss yet) - Revision: - secsh--dev--1.0--patch-37 - - added functionality to ber to create ber streams. added some common methods - to PKey to allow dumping the key to base64 (the format used by openssh for - public key files and host key lists), and a factory for creating a key from - a private key file, and a common way to save private keys. RSAKey luckily - didn't have to change that much. - - also added a factory method to RSAKey to generate a new key. - - - modified files: - paramiko/ber.py paramiko/pkey.py paramiko/rsakey.py - - -2004-04-05 10:12:59 GMT Robey Pointer patch-36 - - Summary: - add common.py for commonly used constants and globals - Revision: - secsh--dev--1.0--patch-36 - - common.py now stores the constants and globals. - lots of renaming because of this. 
- - modified files: - paramiko/auth_transport.py paramiko/channel.py - paramiko/kex_gex.py paramiko/kex_group1.py - paramiko/transport.py paramiko/util.py - - -2004-04-02 02:41:43 GMT Robey Pointer patch-35 - - Summary: - add send_ignore - Revision: - secsh--dev--1.0--patch-35 - - add send_ignore() call to allow for sending garbage ignored packets to the - remote side. - - modified files: - paramiko/transport.py - - -2004-03-16 07:33:09 GMT Robey Pointer patch-34 - - Summary: - fix some arcana in unpacking private keys - Revision: - secsh--dev--1.0--patch-34 - - "!= type([])" is a pretty obscure way to say it. let's try "is not list" - which is a lot more readable. - - (mostly this is a test to make sure tla is working okay on my laptop.) - - modified files: - paramiko/dsskey.py paramiko/rsakey.py - - -2004-03-09 01:09:17 GMT Robey Pointer patch-33 - - Summary: - include tests in manifest - Revision: - secsh--dev--1.0--patch-33 - - include the tests in the manifest for dist, and remove some outdated notes in - NOTES about the exported API (this is doc'd wayyy better in epydoc now). - - modified files: - MANIFEST.in NOTES - - -2004-03-08 17:54:19 GMT Robey Pointer patch-32 - - Summary: - add unit tests - Revision: - secsh--dev--1.0--patch-32 - - add unit tests for BufferedFile and SFTP (it's a start). remove the demo sftp - client because it was 99% copied from the other demos, which makes it kinda - confusing. the unit tests are a much better example. - - new files: - .arch-ids/test.py.id test.py tests/.arch-ids/=id - tests/.arch-ids/test_file.py.id - tests/.arch-ids/test_sftp.py.id tests/test_file.py - tests/test_sftp.py - - removed files: - .arch-ids/demo_sftp.py.id demo_sftp.py - - new directories: - tests tests/.arch-ids - - -2004-03-08 17:52:25 GMT Robey Pointer patch-31 - - Summary: - bump version number to eevee - Revision: - secsh--dev--1.0--patch-31 - - bump the version number to eevee in a few places and talk about the unit - tests. - - modified files: - Makefile README paramiko/__init__.py setup.py - - -2004-03-08 17:50:49 GMT Robey Pointer patch-30 - - Summary: - finish up client sftp support - Revision: - secsh--dev--1.0--patch-30 - - added 'stat' to SFTPFile and SFTP, documented 'open' and 'listdir', and added - 'rmdir', 'lstat', 'symlink', 'chmod', 'chown', 'utime', 'readlink'. - - turned off ultra debugging now that the unit tests are all working. - - modified files: - paramiko/sftp.py - - -2004-03-08 17:45:44 GMT Robey Pointer patch-29 - - Summary: - fix some docs and BufferedFile.readline - Revision: - secsh--dev--1.0--patch-29 - - fix some documentation and fix readline()'s universal newline support to - always return strings ending with '\n', regardless of how they were in the - original file. (this is an obvious feature of python's universal newline - support that i somehow missed before.) - - modified files: - paramiko/file.py paramiko/message.py - - -2004-03-08 09:47:47 GMT Robey Pointer patch-28 - - Summary: - fix lingering thread bug - Revision: - secsh--dev--1.0--patch-28 - - this bug has been in there forever and i could never figure out a workaround - till now. - - when the python interpreter exits, it doesn't necessarily destroy the - remaining objects or call __del__ on anything, and it will lock up until all - threads finish running. how the threads are supposed to notice the exiting - interpreter has always been sort of a mystery to me. - - tonight i figured out how to use the 'atexit' module to register a handler - that runs when the interpreter exits. 
now we keep a list of active threads - and ask them all to exit on shutdown. no more going to another shell to - kill -9 python! yeah!! - - modified files: - paramiko/transport.py - - -2004-03-04 08:21:45 GMT Robey Pointer patch-27 - - Summary: - add BufferedFile abstraction - Revision: - secsh--dev--1.0--patch-27 - - SFTP client mode is mostly functional. there are probably still some bugs - but most of the operations on "file" objects have survived my simple tests. - - BufferedFile wraps a simpler stream in something that looks like a python - file (and can even handle seeking if the stream underneath supports it). - it's meant to be subclassed. most of it is ripped out of what used to be - ChannelFile so i can reuse it for sftp -- ChannelFile is now tiny. - - SFTP and Message are now exported. - - fixed util.format_binary_line to not quote spaces. - - new files: - .arch-ids/demo_sftp.py.id demo_sftp.py - paramiko/.arch-ids/file.py.id paramiko/.arch-ids/sftp.py.id - paramiko/file.py paramiko/sftp.py - - modified files: - paramiko/__init__.py paramiko/channel.py paramiko/message.py - paramiko/util.py - - -2004-01-27 02:04:59 GMT Robey Pointer patch-26 - - Summary: - Transport constructor can take hostname or address tuple - Revision: - secsh--dev--1.0--patch-26 - - part of an ongoing attempt to make "simple" versions of some of the API calls, - so you can do common-case operations with just a few calls: - - Transport's constructor will now let you pass in a string or tuple instead - of a socket-like object. if you pass in a string, it assumes the string is - a hostname (with optional ":port" segment) and turns that into an address - tuple. if you pass in a tuple, it assumes it's an address tuple. in both - cases, it then creates a socket, connects to the given address, and then - continues as if that was the socket passed in. - - the idea being that you can call Transport('example.com') and it will do - the right thing. - - modified files: - paramiko/transport.py - - -2004-01-27 02:00:19 GMT Robey Pointer patch-25 - - Summary: - pkey no longer raises binascii.Error - Revision: - secsh--dev--1.0--patch-25 - - catch binascii.Error in the private key decoder and convert it into an - SSHException. there's no reason people should have to care that it was a - decoding error vs. any of the other million things that could be wrong in - a corrupt key file. - - modified files: - paramiko/pkey.py - - -2004-01-27 01:45:44 GMT Robey Pointer patch-24 - - Summary: - document more of Message; add get_int64 - Revision: - secsh--dev--1.0--patch-24 - - all of the get_* methods are now documented, but there's a bit more to do. - get_int64 added for eventual sftp support. - - modified files: - paramiko/message.py - - -2004-01-04 10:33:05 GMT Robey Pointer patch-23 - - Summary: - quick doc fix. - Revision: - secsh--dev--1.0--patch-23 - - fix broken cross-link in kex_gex docs. - - modified files: - paramiko/kex_gex.py - - -2004-01-04 10:26:00 GMT Robey Pointer patch-22 - - Summary: - fix MANIFEST.in, change version numbers to 0.9-doduo, fix LPGL notices - Revision: - secsh--dev--1.0--patch-22 - - fixed MANIFEST.in to include the demo scripts, LICENSE, and ChangeLog. - upped everything to version 0.9-doduo. - - fixed the copyright notice, and added the LGPL banner to the top of every - python file. 
- - modified files: - MANIFEST.in Makefile NOTES README paramiko/__init__.py - paramiko/auth_transport.py paramiko/ber.py paramiko/channel.py - paramiko/dsskey.py paramiko/kex_gex.py paramiko/kex_group1.py - paramiko/message.py paramiko/pkey.py paramiko/primes.py - paramiko/rsakey.py paramiko/ssh_exception.py - paramiko/transport.py paramiko/util.py setup.py - - -2004-01-04 10:07:35 GMT Robey Pointer patch-21 - - Summary: - MANIFEST -> MANIFEST.in, fix setup.py. - Revision: - secsh--dev--1.0--patch-21 - - out with MANIFEST, in with MANIFEST.in. - - new files: - .arch-ids/MANIFEST.in.id MANIFEST.in - - removed files: - .arch-ids/MANIFEST.id MANIFEST - - modified files: - setup.py - - -2004-01-04 09:29:13 GMT Robey Pointer patch-20 - - Summary: - more docs, and password-protected key files can now be read - Revision: - secsh--dev--1.0--patch-20 - - lots more documentation, some of it moved out of the README file, which is - now much smaller and less rambling. - - repr(Transport) now reports the number of bits used in the cipher. - - cleaned up BER to use util functions, and throw a proper exception (the new - BERException) on error. it doesn't ever have to be a full BER decoder, but - it can at least comb its hair and tuck in its shirt. - - lots of stuff added to PKey.read_private_key_file so it can try to decode - password-protected key files. right now it only understands "DES-EDE3-CBC" - format, but this is the only format i've seen openssh make so far. if the - key is password-protected, but no password was given, a new exception - (PasswordRequiredException) is raised so an outer layer can ask for a password - and try again. - - modified files: - README demo.py demo_server.py paramiko/__init__.py - paramiko/auth_transport.py paramiko/ber.py paramiko/channel.py - paramiko/dsskey.py paramiko/pkey.py paramiko/primes.py - paramiko/rsakey.py paramiko/ssh_exception.py - paramiko/transport.py paramiko/util.py - - -2003-12-31 06:31:43 GMT Robey Pointer patch-19 - - Summary: - renamed auth_key -> auth_publickey; more docs. - Revision: - secsh--dev--1.0--patch-19 - - renamed Transport.auth_key to auth_publickey for consistency. and lots more - documentation. - - modified files: - README demo.py demo_server.py paramiko/__init__.py - paramiko/auth_transport.py paramiko/transport.py - - -2003-12-30 22:24:21 GMT Robey Pointer patch-18 - - Summary: - added public-key support to server mode, more docs - Revision: - secsh--dev--1.0--patch-18 - - added public-key support to server mode (it can now verify a client signature) - and added a demo of that to the demo_server.py script (user_rsa_key). in the - process, cleaned up the API of PKey so that now it only has to know about - signing and verifying ssh2 blobs, and can be hashed and compared with other - keys (comparing & hashing only the public parts of the key). keys can also - be created from strings now too. - - some more documentation and hiding private methods. - - new files: - .arch-ids/user_rsa_key.id .arch-ids/user_rsa_key.pub.id - user_rsa_key user_rsa_key.pub - - modified files: - Makefile demo_server.py paramiko/__init__.py - paramiko/auth_transport.py paramiko/dsskey.py - paramiko/kex_gex.py paramiko/kex_group1.py paramiko/pkey.py - paramiko/rsakey.py paramiko/transport.py - - -2003-12-30 07:18:20 GMT Robey Pointer patch-17 - - Summary: - lots more documentation, and added Transport.connect() - Revision: - secsh--dev--1.0--patch-17 - - renamed demo_host_key to demo_rsa_key. 
moved changelog to a separate file, - and indicated that future changelog entries should be fetched from tla. - tried to clean up "__all__" in a way that makes epydoc still work. - - added lots more documentation, and renamed many methods and vars to hide - them as private non-exported API. - - Transport's ModulusPack is now a static member, so it only has to be loaded - once, and can then be used by any future Transport object. - - added Transport.connect(), which tries to wrap all the SSH2 negotiation and - authentication into one method. you should be able to create a Transport, - call connect(), and then create channels. - - new files: - .arch-ids/ChangeLog.id .arch-ids/demo_simple.py.id ChangeLog - demo_simple.py paramiko/.arch-ids/pkey.py.id paramiko/pkey.py - - removed files: - .arch-ids/paramiko.py.id paramiko.py - - modified files: - Makefile NOTES README demo.py demo_server.py - paramiko/__init__.py paramiko/auth_transport.py - paramiko/channel.py paramiko/dsskey.py paramiko/kex_gex.py - paramiko/kex_group1.py paramiko/rsakey.py - paramiko/transport.py setup.py {arch}/=tagging-method - - renamed files: - .arch-ids/demo_host_key.id - ==> .arch-ids/demo_rsa_key.id - demo_host_key - ==> demo_rsa_key - - -2003-12-28 03:20:42 GMT Robey Pointer patch-16 - - Summary: - hook up server-side kex-gex; add more documentation - Revision: - secsh--dev--1.0--patch-16 - - group-exchange kex should work now on the server side. it will only be - advertised if a "moduli" file has been loaded (see the -gasp- docs) so we - don't spend hours (literally. hours.) computing primes. some of the logic - was previously wrong, too, since it had never been tested. - - fixed repr() string for Transport/BaseTransport. moved is_authenticated to - Transport where it belongs. - - added lots of documentation (but still only about 10% documented). lots of - methods were made private finally. - - new files: - paramiko/.arch-ids/primes.py.id paramiko/primes.py - - modified files: - NOTES demo.py demo_server.py paramiko/__init__.py - paramiko/auth_transport.py paramiko/channel.py - paramiko/kex_gex.py paramiko/kex_group1.py - paramiko/transport.py paramiko/util.py - - -2003-12-27 02:03:44 GMT Robey Pointer patch-15 - - Summary: - fix up new paramiko/ folder. - Revision: - secsh--dev--1.0--patch-15 - - moved SSHException to a new file (ssh_exception.py) and turned paramiko.py - into an __init__.py file. i'm still not entirely sure how this normally - works, so i may have done something wrong, but it's supposed to work the - same as before. - - new files: - paramiko/.arch-ids/__init__.py.id - paramiko/.arch-ids/ssh_exception.py.id paramiko/__init__.py - paramiko/ssh_exception.py - - modified files: - paramiko/auth_transport.py paramiko/channel.py - paramiko/dsskey.py paramiko/kex_gex.py paramiko/kex_group1.py - paramiko/transport.py - - -2003-12-27 01:49:19 GMT Robey Pointer patch-14 - - Summary: - move the paramiko files into a paramiko/ folder. - Revision: - secsh--dev--1.0--patch-14 - - just moving the files into a folder. it won't build this way yet. 
- - new files: - paramiko/.arch-ids/=id - - renamed files: - .arch-ids/auth_transport.py.id - ==> paramiko/.arch-ids/auth_transport.py.id - .arch-ids/ber.py.id - ==> paramiko/.arch-ids/ber.py.id - .arch-ids/channel.py.id - ==> paramiko/.arch-ids/channel.py.id - .arch-ids/dsskey.py.id - ==> paramiko/.arch-ids/dsskey.py.id - .arch-ids/kex_gex.py.id - ==> paramiko/.arch-ids/kex_gex.py.id - .arch-ids/kex_group1.py.id - ==> paramiko/.arch-ids/kex_group1.py.id - .arch-ids/message.py.id - ==> paramiko/.arch-ids/message.py.id - .arch-ids/rsakey.py.id - ==> paramiko/.arch-ids/rsakey.py.id - .arch-ids/transport.py.id - ==> paramiko/.arch-ids/transport.py.id - .arch-ids/util.py.id - ==> paramiko/.arch-ids/util.py.id - auth_transport.py - ==> paramiko/auth_transport.py - ber.py - ==> paramiko/ber.py - channel.py - ==> paramiko/channel.py - dsskey.py - ==> paramiko/dsskey.py - kex_gex.py - ==> paramiko/kex_gex.py - kex_group1.py - ==> paramiko/kex_group1.py - message.py - ==> paramiko/message.py - rsakey.py - ==> paramiko/rsakey.py - transport.py - ==> paramiko/transport.py - util.py - ==> paramiko/util.py - - new directories: - paramiko paramiko/.arch-ids - - -2003-12-24 22:09:43 GMT Robey Pointer patch-13 - - Summary: - fix a deadlock/race in handle_eof & close - Revision: - secsh--dev--1.0--patch-13 - - (patch from fred gansevles) - add locking around the eof handler and the close() call, so we can't be in - both simultaneously. - - modified files: - channel.py - - -2003-12-24 20:49:38 GMT Robey Pointer patch-12 - - Summary: - fix dss key signing - Revision: - secsh--dev--1.0--patch-12 - - (expanded on a patch from fred gansevles) - add a demo dss key for server mode, and fix some bugs that had caused the dss - signing stuff to never work before. the demo_server is a bit more verbose - now, too. both key types (RSAKey & DSSKey) now have a function to return the - fingerprint of the key, and both versions of read_private_key_file() now raise - exceptions on failure, instead of just silently setting "valid" to false. - - new files: - .arch-ids/demo_dss_key.id demo_dss_key - - modified files: - demo_server.py dsskey.py kex_gex.py kex_group1.py paramiko.py - rsakey.py transport.py - - -2003-12-23 06:44:56 GMT Robey Pointer patch-11 - - Summary: - in server mode, don't offer keys we don't have - Revision: - secsh--dev--1.0--patch-11 - - (from Paolo Losi) in server mode, when advertising which key methods we - support, don't list methods that we don't have any existing keys for. - - modified files: - transport.py - - -2003-12-23 06:36:27 GMT Robey Pointer patch-10 - - Summary: - add logfiles and .pyc files to the "junk" list - Revision: - secsh--dev--1.0--patch-10 - - add *.log and *.pyc to the explicit junk list. - - modified files: - {arch}/=tagging-method - - -2003-11-10 08:49:50 GMT Robey Pointer patch-9 - - Summary: - rename secsh -> paramiko - Revision: - secsh--dev--1.0--patch-9 - - also, rename SecshException back to SSHException. sigh. 
:) - - modified files: - ./MANIFEST ./Makefile ./NOTES ./README ./auth_transport.py - ./channel.py ./demo.py ./demo_server.py ./kex_gex.py - ./kex_group1.py ./message.py ./paramiko.py ./setup.py - ./transport.py - - renamed files: - ./.arch-ids/secsh.py.id - ==> ./.arch-ids/paramiko.py.id - ./secsh.py - ==> ./paramiko.py - - -2003-11-10 06:52:35 GMT Robey Pointer patch-8 - - Summary: - doc changes - Revision: - secsh--dev--1.0--patch-8 - - - modified files: - ./README ./demo_server.py ./secsh.py - - -2003-11-10 04:54:02 GMT Robey Pointer patch-7 - - Summary: - cleaned up server code, renamed some files & classes - Revision: - secsh--dev--1.0--patch-7 - - renamed demo-server.py and demo-host-key to demo_server.py and - demo_host_key, just to be consistent. - - renamed SSHException -> SecshException. - - generalized the mechanism where Channel decides whether to allow - different channel requests: 4 of the main ones (pty, window-change, - shell, and subsystem) go through easily override-able methods now. - you could probably make an actual ssh shell server. - - gave ChannelFile a repr(). - - turned off ultra debugging in the demos. demo_server creates a - subclass of Channel to allow pty/shell and sets an event when the - shell request is made, so that it knows when it can start sending - the fake bbs. - - renamed to charmander and updated some of the distutils files. - - modified files: - ./MANIFEST ./NOTES ./auth_transport.py ./channel.py ./demo.py - ./demo_server.py ./kex_gex.py ./kex_group1.py ./secsh.py - ./setup.py ./transport.py - - renamed files: - ./.arch-ids/demo-host-key.id - ==> ./.arch-ids/demo_host_key.id - ./.arch-ids/demo-server.py.id - ==> ./.arch-ids/demo_server.py.id - ./demo-host-key - ==> ./demo_host_key - ./demo-server.py - ==> ./demo_server.py - - -2003-11-09 21:16:35 GMT Robey Pointer patch-6 - - Summary: - notes about the exported api - Revision: - secsh--dev--1.0--patch-6 - - just wrote some quick notes (for a few of the classes) about which - methods are intended to be the exported API. python has no decent - way of distinguishing private vs public. - - - modified files: - ./NOTES - - -2003-11-09 21:14:21 GMT Robey Pointer patch-5 - - Summary: - big chunk of work which makes server code 95% done - Revision: - secsh--dev--1.0--patch-5 - - fixed auth check methods to return just a result (failed, succeeded, - partially succeeded) and always use get_allowed_auths to determine the - list of allowed auth methods to return. - - channel's internal API changed a bit to allow for client-side vs. - server-side channels. we now honor the "want-reply" bit from channel - requests. in server mode (for now), we automatically allow pty-req - and shell requests without doing anything. - - ChannelFile was fixed up a bit to support universal newlines. readline - got rewritten: the old way used the "greedy" read call from ChannelFile, - which won't work if the socket doesn't have that much data buffered and - ready. now it uses recv directly, and tracks the different newlines. - - demo-server.py now answers to a single shell request (like a CLI ssh - tool will make) and does a very simple demo pretending to be a BBS. - - transport: fixed a bug with parsing the remote side's banner. channel - requests are passed to another method in server mode, to determine if - we should allow it. new allowed channels are added to an accept queue, - and a new method 'accept' (with timeout) will block until the next - incoming channel is ready. 
- - - modified files: - ./auth_transport.py ./channel.py ./demo-server.py ./demo.py - ./transport.py - - -2003-11-09 20:59:51 GMT Robey Pointer patch-4 - - Summary: - change kex-gex server code to generate primes by hand - Revision: - secsh--dev--1.0--patch-4 - - added a util function "generate_prime" to compare to the incredibly slow C - version, but it's no faster of course. i think kex-gex from the server is - just not going to be feasible without having a separate thread generate some - primes in the background to have handy when a request comes in. so in short, - this still doesn't work. - - also i put bit_length into util and a tb_strings function which gets stack - traceback info and splits it into a list of strings. - - - modified files: - ./kex_gex.py ./util.py - - -2003-11-07 10:36:42 GMT Robey Pointer patch-3 - - Summary: - remove some leftover garbage from dsskey - Revision: - secsh--dev--1.0--patch-3 - - leftover from a cut & paste i was doing a few days ago. bad robey. - - modified files: - ./dsskey.py - - -2003-11-06 07:34:27 GMT Robey Pointer patch-2 - - Summary: - add a demo host key and point demo-server at it. - Revision: - secsh--dev--1.0--patch-2 - - also, temporarily comment out the nonfunctional kex-gex method. - - new files: - ./.arch-ids/demo-host-key.id ./demo-host-key - - modified files: - ./demo-server.py ./transport.py - - -2003-11-04 08:50:22 GMT Robey Pointer patch-1 - - Summary: - no changes - Revision: - secsh--dev--1.0--patch-1 - - why aren't my log messages kept? - - modified files: - ./kex_gex.py - - new patches: - robey@lag.net--2003/secsh--dev--1.0--patch-1 - - -2003-11-04 08:34:24 GMT Robey Pointer base-0 - - Summary: - initial import - Revision: - secsh--dev--1.0--base-0 - - - (automatically generated log message) - - new files: - ./LICENSE ./MANIFEST ./Makefile ./NOTES ./README - ./auth_transport.py ./ber.py ./channel.py ./demo-server.py - ./demo.py ./dsskey.py ./kex_gex.py ./kex_group1.py - ./message.py ./rsakey.py ./secsh.py ./setup.py ./transport.py - ./util.py - - new patches: - robey@lag.net--2003/secsh--dev--1.0--base-0 - - diff --git a/Dockerfile.i386 b/Dockerfile.i386 new file mode 100644 index 000000000..76418cede --- /dev/null +++ b/Dockerfile.i386 @@ -0,0 +1,10 @@ +# Convenience tool for testing a 32-bit related security flaw. May be +# generalized in future to be more useful; until then, it is NOT +# officially supported but purely a maintainer-facing artifact. + +FROM --platform=i386 i386/alpine:3.15 + +RUN apk add openssl-dev python3-dev libffi-dev make cargo + +RUN python3 -m venv env +RUN env/bin/pip install -U pip diff --git a/MANIFEST.in b/MANIFEST.in index e718ea24c..c7de90974 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,5 @@ -include LICENSE test.py setup_helper.py +include LICENSE setup_helper.py recursive-include docs * -recursive-include tests *.py *.key +recursive-include tests *.py *.key *.pub +recursive-include tests/configs * recursive-include demos *.py *.key user_rsa_key user_rsa_key.pub diff --git a/README.rst b/README.rst index db342e22c..f99090d33 100644 --- a/README.rst +++ b/README.rst @@ -1,136 +1,48 @@ -======== -Paramiko -======== - -.. Continuous integration and code coverage badges - -.. image:: https://travis-ci.org/paramiko/paramiko.svg?branch=master - :target: https://travis-ci.org/paramiko/paramiko -.. 
image:: https://coveralls.io/repos/paramiko/paramiko/badge.svg?branch=master&service=github - :target: https://coveralls.io/github/paramiko/paramiko?branch=master - -:Paramiko: Python SSH module -:Copyright: Copyright (c) 2003-2009 Robey Pointer -:Copyright: Copyright (c) 2013-2016 Jeff Forcier -:License: `LGPL `_ -:Homepage: http://www.paramiko.org/ -:API docs: http://docs.paramiko.org -:Development: https://github.com/paramiko/paramiko - - -What ----- - -"Paramiko" is a combination of the Esperanto words for "paranoid" and -"friend". It's a module for Python 2.6+/3.3+ that implements the SSH2 protocol -for secure (encrypted and authenticated) connections to remote machines. Unlike -SSL (aka TLS), SSH2 protocol does not require hierarchical certificates signed -by a powerful central authority. You may know SSH2 as the protocol that -replaced Telnet and rsh for secure access to remote shells, but the protocol -also includes the ability to open arbitrary channels to remote services across -the encrypted tunnel (this is how SFTP works, for example). - -It is written entirely in Python (though it depends on third-party C wrappers -for low level crypto; these are often available precompiled) and is released -under the GNU Lesser General Public License (`LGPL -`_). - -The package and its API is fairly well documented in the ``docs`` folder that -should have come with this archive. - - -Installation ------------- - -For most users, the recommended method to install is via pip:: - - pip install paramiko - -For more detailed instructions, see the `Installing -`_ page on the main Paramiko website. - - -Portability Issues ------------------- - -Paramiko primarily supports POSIX platforms with standard OpenSSH -implementations, and is most frequently tested on Linux and OS X. Windows is -supported as well, though it may not be as straightforward. - -Bugs & Support --------------- - -:Bug Reports: `Github `_ -:Mailing List: ``paramiko@librelist.com`` (see the `LibreList website - `_ for usage details). -:IRC: ``#paramiko`` on Freenode - - -Kerberos Support ----------------- - -Paramiko ships with optional Kerberos/GSSAPI support; for info on the extra -dependencies for this, see the `GSS-API section -`_ -on the main Paramiko website. - - -Demo ----- - -Several demo scripts come with Paramiko to demonstrate how to use it. -Probably the simplest demo of all is this:: - - import paramiko, base64 - key = paramiko.RSAKey(data=base64.decodestring('AAA...')) - client = paramiko.SSHClient() - client.get_host_keys().add('ssh.example.com', 'ssh-rsa', key) - client.connect('ssh.example.com', username='strongbad', password='thecheat') - stdin, stdout, stderr = client.exec_command('ls') - for line in stdout: - print '... ' + line.strip('\n') - client.close() - -This prints out the results of executing ``ls`` on a remote server. The host -key 'AAA...' should of course be replaced by the actual base64 encoding of the -host key. If you skip host key verification, the connection is not secure! - -The following example scripts (in demos/) get progressively more detailed: - -:demo_simple.py: - Calls invoke_shell() and emulates a terminal/TTY through which you can - execute commands interactively on a remote server. Think of it as a - poor man's SSH command-line client. - -:demo.py: - Same as demo_simple.py, but allows you to authenticate using a private - key, attempts to use an SSH agent if present, and uses the long form of - some of the API calls. 
- -:forward.py: - Command-line script to set up port-forwarding across an SSH transport. - -:demo_sftp.py: - Opens an SFTP session and does a few simple file operations. - -:demo_server.py: - An SSH server that listens on port 2200 and accepts a login for - 'robey' (password 'foo'), and pretends to be a BBS. Meant to be a - very simple demo of writing an SSH server. - -:demo_keygen.py: - A key generator similar to OpenSSH ``ssh-keygen(1)`` program with - Paramiko keys generation and progress functions. - -Use ---- - -The demo scripts are probably the best example of how to use this package. -There is also a lot of documentation, generated with Sphinx autodoc, in the -doc/ folder. - -There are also unit tests here:: - - $ python ./test.py - -Which will verify that most of the core components are working correctly. +|version| |python| |license| |ci| |coverage| + +.. |version| image:: https://img.shields.io/pypi/v/paramiko + :target: https://pypi.org/project/paramiko/ + :alt: PyPI - Package Version +.. |python| image:: https://img.shields.io/pypi/pyversions/paramiko + :target: https://pypi.org/project/paramiko/ + :alt: PyPI - Python Version +.. |license| image:: https://img.shields.io/pypi/l/paramiko + :target: https://github.com/paramiko/paramiko/blob/main/LICENSE + :alt: PyPI - License +.. |ci| image:: https://img.shields.io/circleci/build/github/paramiko/paramiko/main + :target: https://app.circleci.com/pipelines/github/paramiko/paramiko + :alt: CircleCI +.. |coverage| image:: https://img.shields.io/codecov/c/gh/paramiko/paramiko + :target: https://app.codecov.io/gh/paramiko/paramiko + :alt: Codecov + +Welcome to Paramiko! +==================== + +Paramiko is a pure-Python [#]_ (2.7, 3.4+) implementation of the SSHv2 protocol +[#]_, providing both client and server functionality. It provides the +foundation for the high-level SSH library `Fabric `_, +which is what we recommend you use for common client use-cases such as running +remote shell commands or transferring files. + +Direct use of Paramiko itself is only intended for users who need +advanced/low-level primitives or want to run an in-Python sshd. + +For installation information, changelogs, FAQs and similar, please visit `our +main project website `_; for API details, see `the +versioned docs `_. Additionally, the project +maintainer keeps a `roadmap `_ on his +personal site. + +.. [#] + Paramiko relies on `cryptography `_ for crypto + functionality, which makes use of C and Rust extensions but has many + precompiled options available. See `our installation page + `_ for details. + +.. [#] + SSH is defined in :rfc-reference:`4251`, :rfc-reference:`4252`, + :rfc-reference:`4253` and :rfc-reference:`4254`. The primary working + implementation of the protocol is the `OpenSSH project + `_. Paramiko implements a large portion of the SSH + feature set, but there are occasional gaps. diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000..69cb76019 --- /dev/null +++ b/codecov.yml @@ -0,0 +1 @@ +comment: false diff --git a/demos/demo.py b/demos/demo.py index fff61784a..c9b0a5f55 100755 --- a/demos/demo.py +++ b/demos/demo.py @@ -31,6 +31,7 @@ from paramiko.py3compat import input import paramiko + try: import interactive except ImportError: @@ -42,71 +43,73 @@ def agent_auth(transport, username): Attempt to authenticate to the given transport using any of the private keys available from an SSH agent. 
""" - + agent = paramiko.Agent() agent_keys = agent.get_keys() if len(agent_keys) == 0: return - + for key in agent_keys: - print('Trying ssh-agent key %s' % hexlify(key.get_fingerprint())) + print("Trying ssh-agent key %s" % hexlify(key.get_fingerprint())) try: transport.auth_publickey(username, key) - print('... success!') + print("... success!") return except paramiko.SSHException: - print('... nope.') + print("... nope.") def manual_auth(username, hostname): - default_auth = 'p' - auth = input('Auth by (p)assword, (r)sa key, or (d)ss key? [%s] ' % default_auth) + default_auth = "p" + auth = input( + "Auth by (p)assword, (r)sa key, or (d)ss key? [%s] " % default_auth + ) if len(auth) == 0: auth = default_auth - if auth == 'r': - default_path = os.path.join(os.environ['HOME'], '.ssh', 'id_rsa') - path = input('RSA key [%s]: ' % default_path) + if auth == "r": + default_path = os.path.join(os.environ["HOME"], ".ssh", "id_rsa") + path = input("RSA key [%s]: " % default_path) if len(path) == 0: path = default_path try: key = paramiko.RSAKey.from_private_key_file(path) except paramiko.PasswordRequiredException: - password = getpass.getpass('RSA key password: ') + password = getpass.getpass("RSA key password: ") key = paramiko.RSAKey.from_private_key_file(path, password) t.auth_publickey(username, key) - elif auth == 'd': - default_path = os.path.join(os.environ['HOME'], '.ssh', 'id_dsa') - path = input('DSS key [%s]: ' % default_path) + elif auth == "d": + default_path = os.path.join(os.environ["HOME"], ".ssh", "id_dsa") + path = input("DSS key [%s]: " % default_path) if len(path) == 0: path = default_path try: key = paramiko.DSSKey.from_private_key_file(path) except paramiko.PasswordRequiredException: - password = getpass.getpass('DSS key password: ') + password = getpass.getpass("DSS key password: ") key = paramiko.DSSKey.from_private_key_file(path, password) t.auth_publickey(username, key) else: - pw = getpass.getpass('Password for %s@%s: ' % (username, hostname)) + pw = getpass.getpass("Password for %s@%s: " % (username, hostname)) t.auth_password(username, pw) # setup logging -paramiko.util.log_to_file('demo.log') +paramiko.util.log_to_file("demo.log") -username = '' +username = "" if len(sys.argv) > 1: hostname = sys.argv[1] - if hostname.find('@') >= 0: - username, hostname = hostname.split('@') + if hostname.find("@") >= 0: + username, hostname = hostname.split("@") else: - hostname = input('Hostname: ') + hostname = input("Hostname: ") if len(hostname) == 0: - print('*** Hostname required.') + print("*** Hostname required.") sys.exit(1) port = 22 -if hostname.find(':') >= 0: - hostname, portstr = hostname.split(':') +if hostname.find(":") >= 0: + hostname, portstr = hostname.split(":") port = int(portstr) # now connect @@ -114,7 +117,7 @@ def manual_auth(username, hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((hostname, port)) except Exception as e: - print('*** Connect failed: ' + str(e)) + print("*** Connect failed: " + str(e)) traceback.print_exc() sys.exit(1) @@ -123,34 +126,38 @@ def manual_auth(username, hostname): try: t.start_client() except paramiko.SSHException: - print('*** SSH negotiation failed.') + print("*** SSH negotiation failed.") sys.exit(1) try: - keys = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts')) + keys = paramiko.util.load_host_keys( + os.path.expanduser("~/.ssh/known_hosts") + ) except IOError: try: - keys = paramiko.util.load_host_keys(os.path.expanduser('~/ssh/known_hosts')) + keys = 
paramiko.util.load_host_keys( + os.path.expanduser("~/ssh/known_hosts") + ) except IOError: - print('*** Unable to open host keys file') + print("*** Unable to open host keys file") keys = {} # check server's host key -- this is important. key = t.get_remote_server_key() if hostname not in keys: - print('*** WARNING: Unknown host key!') + print("*** WARNING: Unknown host key!") elif key.get_name() not in keys[hostname]: - print('*** WARNING: Unknown host key!') + print("*** WARNING: Unknown host key!") elif keys[hostname][key.get_name()] != key: - print('*** WARNING: Host key has changed!!!') + print("*** WARNING: Host key has changed!!!") sys.exit(1) else: - print('*** Host key OK.') + print("*** Host key OK.") # get username - if username == '': + if username == "": default_username = getpass.getuser() - username = input('Username [%s]: ' % default_username) + username = input("Username [%s]: " % default_username) if len(username) == 0: username = default_username @@ -158,25 +165,23 @@ def manual_auth(username, hostname): if not t.is_authenticated(): manual_auth(username, hostname) if not t.is_authenticated(): - print('*** Authentication failed. :(') + print("*** Authentication failed. :(") t.close() sys.exit(1) chan = t.open_session() chan.get_pty() chan.invoke_shell() - print('*** Here we go!\n') + print("*** Here we go!\n") interactive.interactive_shell(chan) chan.close() t.close() except Exception as e: - print('*** Caught exception: ' + str(e.__class__) + ': ' + str(e)) + print("*** Caught exception: " + str(e.__class__) + ": " + str(e)) traceback.print_exc() try: t.close() except: pass sys.exit(1) - - diff --git a/demos/demo_keygen.py b/demos/demo_keygen.py index 860ee4e9c..6a80272d1 100755 --- a/demos/demo_keygen.py +++ b/demos/demo_keygen.py @@ -28,62 +28,97 @@ from paramiko.ssh_exception import SSHException from paramiko.py3compat import u -usage=""" +usage = """ %prog [-v] [-b bits] -t type [-N new_passphrase] [-f output_keyfile]""" default_values = { "ktype": "dsa", "bits": 1024, "filename": "output", - "comment": "" + "comment": "", } -key_dispatch_table = { - 'dsa': DSSKey, - 'rsa': RSAKey, -} +key_dispatch_table = {"dsa": DSSKey, "rsa": RSAKey} + def progress(arg=None): if not arg: - sys.stdout.write('0%\x08\x08\x08 ') + sys.stdout.write("0%\x08\x08\x08 ") sys.stdout.flush() - elif arg[0] == 'p': - sys.stdout.write('25%\x08\x08\x08\x08 ') + elif arg[0] == "p": + sys.stdout.write("25%\x08\x08\x08\x08 ") sys.stdout.flush() - elif arg[0] == 'h': - sys.stdout.write('50%\x08\x08\x08\x08 ') + elif arg[0] == "h": + sys.stdout.write("50%\x08\x08\x08\x08 ") sys.stdout.flush() - elif arg[0] == 'x': - sys.stdout.write('75%\x08\x08\x08\x08 ') + elif arg[0] == "x": + sys.stdout.write("75%\x08\x08\x08\x08 ") sys.stdout.flush() -if __name__ == '__main__': - phrase=None - pfunc=None +if __name__ == "__main__": + + phrase = None + pfunc = None parser = OptionParser(usage=usage) - parser.add_option("-t", "--type", type="string", dest="ktype", + parser.add_option( + "-t", + "--type", + type="string", + dest="ktype", help="Specify type of key to create (dsa or rsa)", - metavar="ktype", default=default_values["ktype"]) - parser.add_option("-b", "--bits", type="int", dest="bits", - help="Number of bits in the key to create", metavar="bits", - default=default_values["bits"]) - parser.add_option("-N", "--new-passphrase", dest="newphrase", - help="Provide new passphrase", metavar="phrase") - parser.add_option("-P", "--old-passphrase", dest="oldphrase", - help="Provide old passphrase", 
metavar="phrase") - parser.add_option("-f", "--filename", type="string", dest="filename", - help="Filename of the key file", metavar="filename", - default=default_values["filename"]) - parser.add_option("-q", "--quiet", default=False, action="store_false", - help="Quiet") - parser.add_option("-v", "--verbose", default=False, action="store_true", - help="Verbose") - parser.add_option("-C", "--comment", type="string", dest="comment", - help="Provide a new comment", metavar="comment", - default=default_values["comment"]) + metavar="ktype", + default=default_values["ktype"], + ) + parser.add_option( + "-b", + "--bits", + type="int", + dest="bits", + help="Number of bits in the key to create", + metavar="bits", + default=default_values["bits"], + ) + parser.add_option( + "-N", + "--new-passphrase", + dest="newphrase", + help="Provide new passphrase", + metavar="phrase", + ) + parser.add_option( + "-P", + "--old-passphrase", + dest="oldphrase", + help="Provide old passphrase", + metavar="phrase", + ) + parser.add_option( + "-f", + "--filename", + type="string", + dest="filename", + help="Filename of the key file", + metavar="filename", + default=default_values["filename"], + ) + parser.add_option( + "-q", "--quiet", default=False, action="store_false", help="Quiet" + ) + parser.add_option( + "-v", "--verbose", default=False, action="store_true", help="Verbose" + ) + parser.add_option( + "-C", + "--comment", + type="string", + dest="comment", + help="Provide a new comment", + metavar="comment", + default=default_values["comment"], + ) (options, args) = parser.parse_args() @@ -95,18 +130,23 @@ def progress(arg=None): globals()[o] = getattr(options, o, default_values[o.lower()]) if options.newphrase: - phrase = getattr(options, 'newphrase') + phrase = getattr(options, "newphrase") if options.verbose: pfunc = progress - sys.stdout.write("Generating priv/pub %s %d bits key pair (%s/%s.pub)..." % (ktype, bits, filename, filename)) + sys.stdout.write( + "Generating priv/pub %s %d bits key pair (%s/%s.pub)..." 
+ % (ktype, bits, filename, filename) + ) sys.stdout.flush() - if ktype == 'dsa' and bits > 1024: + if ktype == "dsa" and bits > 1024: raise SSHException("DSA Keys must be 1024 bits") if ktype not in key_dispatch_table: - raise SSHException("Unknown %s algorithm to generate keys pair" % ktype) + raise SSHException( + "Unknown %s algorithm to generate keys pair" % ktype + ) # generating private key prv = key_dispatch_table[ktype].generate(bits=bits, progress_func=pfunc) @@ -114,7 +154,7 @@ def progress(arg=None): # generating public key pub = key_dispatch_table[ktype](filename=filename, password=phrase) - with open("%s.pub" % filename, 'w') as f: + with open("%s.pub" % filename, "w") as f: f.write("%s %s" % (pub.get_name(), pub.get_base64())) if options.comment: f.write(" %s" % comment) @@ -123,4 +163,12 @@ def progress(arg=None): print("done.") hash = u(hexlify(pub.get_fingerprint())) - print("Fingerprint: %d %s %s.pub (%s)" % (bits, ":".join([ hash[i:2+i] for i in range(0, len(hash), 2)]), filename, ktype.upper())) + print( + "Fingerprint: %d %s %s.pub (%s)" + % ( + bits, + ":".join([hash[i : 2 + i] for i in range(0, len(hash), 2)]), + filename, + ktype.upper(), + ) + ) diff --git a/demos/demo_server.py b/demos/demo_server.py index c4af9b107..313e5fb27 100644 --- a/demos/demo_server.py +++ b/demos/demo_server.py @@ -31,45 +31,47 @@ # setup logging -paramiko.util.log_to_file('demo_server.log') +paramiko.util.log_to_file("demo_server.log") -host_key = paramiko.RSAKey(filename='test_rsa.key') -#host_key = paramiko.DSSKey(filename='test_dss.key') +host_key = paramiko.RSAKey(filename="test_rsa.key") +# host_key = paramiko.DSSKey(filename='test_dss.key') -print('Read key: ' + u(hexlify(host_key.get_fingerprint()))) +print("Read key: " + u(hexlify(host_key.get_fingerprint()))) -class Server (paramiko.ServerInterface): - # 'data' is the output of base64.encodestring(str(key)) +class Server(paramiko.ServerInterface): + # 'data' is the output of base64.b64encode(key) # (using the "user_rsa_key" files) - data = (b'AAAAB3NzaC1yc2EAAAABIwAAAIEAyO4it3fHlmGZWJaGrfeHOVY7RWO3P9M7hp' - b'fAu7jJ2d7eothvfeuoRFtJwhUmZDluRdFyhFY/hFAh76PJKGAusIqIQKlkJxMC' - b'KDqIexkgHAfID/6mqvmnSJf0b5W8v5h2pI/stOSwTQ+pxVhwJ9ctYDhRSlF0iT' - b'UWT10hcuO4Ks8=') + data = ( + b"AAAAB3NzaC1yc2EAAAABIwAAAIEAyO4it3fHlmGZWJaGrfeHOVY7RWO3P9M7hp" + b"fAu7jJ2d7eothvfeuoRFtJwhUmZDluRdFyhFY/hFAh76PJKGAusIqIQKlkJxMC" + b"KDqIexkgHAfID/6mqvmnSJf0b5W8v5h2pI/stOSwTQ+pxVhwJ9ctYDhRSlF0iT" + b"UWT10hcuO4Ks8=" + ) good_pub_key = paramiko.RSAKey(data=decodebytes(data)) def __init__(self): self.event = threading.Event() def check_channel_request(self, kind, chanid): - if kind == 'session': + if kind == "session": return paramiko.OPEN_SUCCEEDED return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED def check_auth_password(self, username, password): - if (username == 'robey') and (password == 'foo'): + if (username == "robey") and (password == "foo"): return paramiko.AUTH_SUCCESSFUL return paramiko.AUTH_FAILED def check_auth_publickey(self, username, key): - print('Auth attempt with key: ' + u(hexlify(key.get_fingerprint()))) - if (username == 'robey') and (key == self.good_pub_key): + print("Auth attempt with key: " + u(hexlify(key.get_fingerprint()))) + if (username == "robey") and (key == self.good_pub_key): return paramiko.AUTH_SUCCESSFUL return paramiko.AUTH_FAILED - - def check_auth_gssapi_with_mic(self, username, - gss_authenticated=paramiko.AUTH_FAILED, - cc_file=None): + + def check_auth_gssapi_with_mic( + self, username, 
gss_authenticated=paramiko.AUTH_FAILED, cc_file=None + ): """ .. note:: We are just checking in `AuthHandler` that the given user is a @@ -88,27 +90,26 @@ def check_auth_gssapi_with_mic(self, username, return paramiko.AUTH_SUCCESSFUL return paramiko.AUTH_FAILED - def check_auth_gssapi_keyex(self, username, - gss_authenticated=paramiko.AUTH_FAILED, - cc_file=None): + def check_auth_gssapi_keyex( + self, username, gss_authenticated=paramiko.AUTH_FAILED, cc_file=None + ): if gss_authenticated == paramiko.AUTH_SUCCESSFUL: return paramiko.AUTH_SUCCESSFUL return paramiko.AUTH_FAILED def enable_auth_gssapi(self): - UseGSSAPI = True - GSSAPICleanupCredentials = False - return UseGSSAPI + return True def get_allowed_auths(self, username): - return 'gssapi-keyex,gssapi-with-mic,password,publickey' + return "gssapi-keyex,gssapi-with-mic,password,publickey" def check_channel_shell_request(self, channel): self.event.set() return True - def check_channel_pty_request(self, channel, term, width, height, pixelwidth, - pixelheight, modes): + def check_channel_pty_request( + self, channel, term, width, height, pixelwidth, pixelheight, modes + ): return True @@ -118,22 +119,22 @@ def check_channel_pty_request(self, channel, term, width, height, pixelwidth, try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock.bind(('', 2200)) + sock.bind(("", 2200)) except Exception as e: - print('*** Bind failed: ' + str(e)) + print("*** Bind failed: " + str(e)) traceback.print_exc() sys.exit(1) try: sock.listen(100) - print('Listening for connection ...') + print("Listening for connection ...") client, addr = sock.accept() except Exception as e: - print('*** Listen/accept failed: ' + str(e)) + print("*** Listen/accept failed: " + str(e)) traceback.print_exc() sys.exit(1) -print('Got a connection!') +print("Got a connection!") try: t = paramiko.Transport(client, gss_kex=DoGSSAPIKeyExchange) @@ -141,43 +142,44 @@ def check_channel_pty_request(self, channel, term, width, height, pixelwidth, try: t.load_server_moduli() except: - print('(Failed to load moduli -- gex will be unsupported.)') + print("(Failed to load moduli -- gex will be unsupported.)") raise t.add_server_key(host_key) server = Server() try: t.start_server(server=server) except paramiko.SSHException: - print('*** SSH negotiation failed.') + print("*** SSH negotiation failed.") sys.exit(1) # wait for auth chan = t.accept(20) if chan is None: - print('*** No channel.') + print("*** No channel.") sys.exit(1) - print('Authenticated!') + print("Authenticated!") server.event.wait(10) if not server.event.is_set(): - print('*** Client never asked for a shell.') + print("*** Client never asked for a shell.") sys.exit(1) - chan.send('\r\n\r\nWelcome to my dorky little BBS!\r\n\r\n') - chan.send('We are on fire all the time! Hooray! Candy corn for everyone!\r\n') - chan.send('Happy birthday to Robot Dave!\r\n\r\n') - chan.send('Username: ') - f = chan.makefile('rU') - username = f.readline().strip('\r\n') - chan.send('\r\nI don\'t like you, ' + username + '.\r\n') + chan.send("\r\n\r\nWelcome to my dorky little BBS!\r\n\r\n") + chan.send( + "We are on fire all the time! Hooray! 
Candy corn for everyone!\r\n" + ) + chan.send("Happy birthday to Robot Dave!\r\n\r\n") + chan.send("Username: ") + f = chan.makefile("rU") + username = f.readline().strip("\r\n") + chan.send("\r\nI don't like you, " + username + ".\r\n") chan.close() except Exception as e: - print('*** Caught exception: ' + str(e.__class__) + ': ' + str(e)) + print("*** Caught exception: " + str(e.__class__) + ": " + str(e)) traceback.print_exc() try: t.close() except: pass sys.exit(1) - diff --git a/demos/demo_sftp.py b/demos/demo_sftp.py index 2cb44701d..7f6a002ed 100644 --- a/demos/demo_sftp.py +++ b/demos/demo_sftp.py @@ -32,38 +32,38 @@ # setup logging -paramiko.util.log_to_file('demo_sftp.log') +paramiko.util.log_to_file("demo_sftp.log") # Paramiko client configuration -UseGSSAPI = True # enable GSS-API / SSPI authentication +UseGSSAPI = True # enable GSS-API / SSPI authentication DoGSSAPIKeyExchange = True Port = 22 # get hostname -username = '' +username = "" if len(sys.argv) > 1: hostname = sys.argv[1] - if hostname.find('@') >= 0: - username, hostname = hostname.split('@') + if hostname.find("@") >= 0: + username, hostname = hostname.split("@") else: - hostname = input('Hostname: ') + hostname = input("Hostname: ") if len(hostname) == 0: - print('*** Hostname required.') + print("*** Hostname required.") sys.exit(1) -if hostname.find(':') >= 0: - hostname, portstr = hostname.split(':') +if hostname.find(":") >= 0: + hostname, portstr = hostname.split(":") Port = int(portstr) # get username -if username == '': +if username == "": default_username = getpass.getuser() - username = input('Username [%s]: ' % default_username) + username = input("Username [%s]: " % default_username) if len(username) == 0: username = default_username if not UseGSSAPI: - password = getpass.getpass('Password for %s@%s: ' % (username, hostname)) + password = getpass.getpass("Password for %s@%s: " % (username, hostname)) else: password = None @@ -72,59 +72,69 @@ hostkeytype = None hostkey = None try: - host_keys = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts')) + host_keys = paramiko.util.load_host_keys( + os.path.expanduser("~/.ssh/known_hosts") + ) except IOError: try: # try ~/ssh/ too, because windows can't have a folder named ~/.ssh/ - host_keys = paramiko.util.load_host_keys(os.path.expanduser('~/ssh/known_hosts')) + host_keys = paramiko.util.load_host_keys( + os.path.expanduser("~/ssh/known_hosts") + ) except IOError: - print('*** Unable to open host keys file') + print("*** Unable to open host keys file") host_keys = {} if hostname in host_keys: hostkeytype = host_keys[hostname].keys()[0] hostkey = host_keys[hostname][hostkeytype] - print('Using host key of type %s' % hostkeytype) + print("Using host key of type %s" % hostkeytype) # now, connect and use paramiko Transport to negotiate SSH2 across the connection try: t = paramiko.Transport((hostname, Port)) - t.connect(hostkey, username, password, gss_host=socket.getfqdn(hostname), - gss_auth=UseGSSAPI, gss_kex=DoGSSAPIKeyExchange) + t.connect( + hostkey, + username, + password, + gss_host=socket.getfqdn(hostname), + gss_auth=UseGSSAPI, + gss_kex=DoGSSAPIKeyExchange, + ) sftp = paramiko.SFTPClient.from_transport(t) # dirlist on remote host - dirlist = sftp.listdir('.') + dirlist = sftp.listdir(".") print("Dirlist: %s" % dirlist) # copy this demo onto the server try: sftp.mkdir("demo_sftp_folder") except IOError: - print('(assuming demo_sftp_folder/ already exists)') - with sftp.open('demo_sftp_folder/README', 'w') as f: - f.write('This was 
created by demo_sftp.py.\n') - with open('demo_sftp.py', 'r') as f: + print("(assuming demo_sftp_folder/ already exists)") + with sftp.open("demo_sftp_folder/README", "w") as f: + f.write("This was created by demo_sftp.py.\n") + with open("demo_sftp.py", "r") as f: data = f.read() - sftp.open('demo_sftp_folder/demo_sftp.py', 'w').write(data) - print('created demo_sftp_folder/ on the server') - + sftp.open("demo_sftp_folder/demo_sftp.py", "w").write(data) + print("created demo_sftp_folder/ on the server") + # copy the README back here - with sftp.open('demo_sftp_folder/README', 'r') as f: + with sftp.open("demo_sftp_folder/README", "r") as f: data = f.read() - with open('README_demo_sftp', 'w') as f: + with open("README_demo_sftp", "w") as f: f.write(data) - print('copied README back here') - + print("copied README back here") + # BETTER: use the get() and put() methods - sftp.put('demo_sftp.py', 'demo_sftp_folder/demo_sftp.py') - sftp.get('demo_sftp_folder/README', 'README_demo_sftp') + sftp.put("demo_sftp.py", "demo_sftp_folder/demo_sftp.py") + sftp.get("demo_sftp_folder/README", "README_demo_sftp") t.close() except Exception as e: - print('*** Caught exception: %s: %s' % (e.__class__, e)) + print("*** Caught exception: %s: %s" % (e.__class__, e)) traceback.print_exc() try: t.close() diff --git a/demos/demo_simple.py b/demos/demo_simple.py old mode 100755 new mode 100644 index 3a17988c0..5dd4f6c16 --- a/demos/demo_simple.py +++ b/demos/demo_simple.py @@ -28,6 +28,7 @@ from paramiko.py3compat import input import paramiko + try: import interactive except ImportError: @@ -35,37 +36,43 @@ # setup logging -paramiko.util.log_to_file('demo_simple.log') +paramiko.util.log_to_file("demo_simple.log") # Paramiko client configuration -UseGSSAPI = True # enable GSS-API / SSPI authentication -DoGSSAPIKeyExchange = True +UseGSSAPI = ( + paramiko.GSS_AUTH_AVAILABLE +) # enable "gssapi-with-mic" authentication, if supported by your python installation +DoGSSAPIKeyExchange = ( + paramiko.GSS_AUTH_AVAILABLE +) # enable "gssapi-kex" key exchange, if supported by your python installation +# UseGSSAPI = False +# DoGSSAPIKeyExchange = False port = 22 # get hostname -username = '' +username = "" if len(sys.argv) > 1: hostname = sys.argv[1] - if hostname.find('@') >= 0: - username, hostname = hostname.split('@') + if hostname.find("@") >= 0: + username, hostname = hostname.split("@") else: - hostname = input('Hostname: ') + hostname = input("Hostname: ") if len(hostname) == 0: - print('*** Hostname required.') + print("*** Hostname required.") sys.exit(1) -if hostname.find(':') >= 0: - hostname, portstr = hostname.split(':') +if hostname.find(":") >= 0: + hostname, portstr = hostname.split(":") port = int(portstr) # get username -if username == '': +if username == "": default_username = getpass.getuser() - username = input('Username [%s]: ' % default_username) + username = input("Username [%s]: " % default_username) if len(username) == 0: username = default_username -if not UseGSSAPI or (not UseGSSAPI and not DoGSSAPIKeyExchange): - password = getpass.getpass('Password for %s@%s: ' % (username, hostname)) +if not UseGSSAPI and not DoGSSAPIKeyExchange: + password = getpass.getpass("Password for %s@%s: " % (username, hostname)) # now, connect and use paramiko Client to negotiate SSH2 across the connection @@ -73,28 +80,34 @@ client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.WarningPolicy()) - print('*** Connecting...') - if not UseGSSAPI or (not UseGSSAPI and 
not DoGSSAPIKeyExchange): + print("*** Connecting...") + if not UseGSSAPI and not DoGSSAPIKeyExchange: client.connect(hostname, port, username, password) else: - # SSPI works only with the FQDN of the target host - hostname = socket.getfqdn(hostname) try: - client.connect(hostname, port, username, gss_auth=UseGSSAPI, - gss_kex=DoGSSAPIKeyExchange) + client.connect( + hostname, + port, + username, + gss_auth=UseGSSAPI, + gss_kex=DoGSSAPIKeyExchange, + ) except Exception: - password = getpass.getpass('Password for %s@%s: ' % (username, hostname)) + # traceback.print_exc() + password = getpass.getpass( + "Password for %s@%s: " % (username, hostname) + ) client.connect(hostname, port, username, password) chan = client.invoke_shell() print(repr(client.get_transport())) - print('*** Here we go!\n') + print("*** Here we go!\n") interactive.interactive_shell(chan) chan.close() client.close() except Exception as e: - print('*** Caught exception: %s: %s' % (e.__class__, e)) + print("*** Caught exception: %s: %s" % (e.__class__, e)) traceback.print_exc() try: client.close() diff --git a/demos/forward.py b/demos/forward.py index 96e1700d5..cd9dabf1c 100644 --- a/demos/forward.py +++ b/demos/forward.py @@ -30,6 +30,7 @@ import os import socket import select + try: import SocketServer except ImportError: @@ -46,30 +47,40 @@ g_verbose = True -class ForwardServer (SocketServer.ThreadingTCPServer): +class ForwardServer(SocketServer.ThreadingTCPServer): daemon_threads = True allow_reuse_address = True - -class Handler (SocketServer.BaseRequestHandler): +class Handler(SocketServer.BaseRequestHandler): def handle(self): try: - chan = self.ssh_transport.open_channel('direct-tcpip', - (self.chain_host, self.chain_port), - self.request.getpeername()) + chan = self.ssh_transport.open_channel( + "direct-tcpip", + (self.chain_host, self.chain_port), + self.request.getpeername(), + ) except Exception as e: - verbose('Incoming request to %s:%d failed: %s' % (self.chain_host, - self.chain_port, - repr(e))) + verbose( + "Incoming request to %s:%d failed: %s" + % (self.chain_host, self.chain_port, repr(e)) + ) return if chan is None: - verbose('Incoming request to %s:%d was rejected by the SSH server.' % - (self.chain_host, self.chain_port)) + verbose( + "Incoming request to %s:%d was rejected by the SSH server." + % (self.chain_host, self.chain_port) + ) return - verbose('Connected! Tunnel open %r -> %r -> %r' % (self.request.getpeername(), - chan.getpeername(), (self.chain_host, self.chain_port))) + verbose( + "Connected! Tunnel open %r -> %r -> %r" + % ( + self.request.getpeername(), + chan.getpeername(), + (self.chain_host, self.chain_port), + ) + ) while True: r, w, x = select.select([self.request, chan], [], []) if self.request in r: @@ -82,22 +93,23 @@ def handle(self): if len(data) == 0: break self.request.send(data) - + peername = self.request.getpeername() chan.close() self.request.close() - verbose('Tunnel closed from %r' % (peername,)) + verbose("Tunnel closed from %r" % (peername,)) def forward_tunnel(local_port, remote_host, remote_port, transport): # this is a little convoluted, but lets me configure things for the Handler # object. (SocketServer doesn't give Handlers any way to access the outer # server normally.) 
- class SubHander (Handler): + class SubHander(Handler): chain_host = remote_host chain_port = remote_port ssh_transport = transport - ForwardServer(('', local_port), SubHander).serve_forever() + + ForwardServer(("", local_port), SubHander).serve_forever() def verbose(s): @@ -114,40 +126,88 @@ def verbose(s): def get_host_port(spec, default_port): "parse 'hostname:22' into a host and port, with the port optional" - args = (spec.split(':', 1) + [default_port])[:2] + args = (spec.split(":", 1) + [default_port])[:2] args[1] = int(args[1]) return args[0], args[1] def parse_options(): global g_verbose - - parser = OptionParser(usage='usage: %prog [options] [:]', - version='%prog 1.0', description=HELP) - parser.add_option('-q', '--quiet', action='store_false', dest='verbose', default=True, - help='squelch all informational output') - parser.add_option('-p', '--local-port', action='store', type='int', dest='port', - default=DEFAULT_PORT, - help='local port to forward (default: %d)' % DEFAULT_PORT) - parser.add_option('-u', '--user', action='store', type='string', dest='user', - default=getpass.getuser(), - help='username for SSH authentication (default: %s)' % getpass.getuser()) - parser.add_option('-K', '--key', action='store', type='string', dest='keyfile', - default=None, - help='private key file to use for SSH authentication') - parser.add_option('', '--no-key', action='store_false', dest='look_for_keys', default=True, - help='don\'t look for or use a private key file') - parser.add_option('-P', '--password', action='store_true', dest='readpass', default=False, - help='read password (for key or password auth) from stdin') - parser.add_option('-r', '--remote', action='store', type='string', dest='remote', default=None, metavar='host:port', - help='remote host and port to forward to') + + parser = OptionParser( + usage="usage: %prog [options] [:]", + version="%prog 1.0", + description=HELP, + ) + parser.add_option( + "-q", + "--quiet", + action="store_false", + dest="verbose", + default=True, + help="squelch all informational output", + ) + parser.add_option( + "-p", + "--local-port", + action="store", + type="int", + dest="port", + default=DEFAULT_PORT, + help="local port to forward (default: %d)" % DEFAULT_PORT, + ) + parser.add_option( + "-u", + "--user", + action="store", + type="string", + dest="user", + default=getpass.getuser(), + help="username for SSH authentication (default: %s)" + % getpass.getuser(), + ) + parser.add_option( + "-K", + "--key", + action="store", + type="string", + dest="keyfile", + default=None, + help="private key file to use for SSH authentication", + ) + parser.add_option( + "", + "--no-key", + action="store_false", + dest="look_for_keys", + default=True, + help="don't look for or use a private key file", + ) + parser.add_option( + "-P", + "--password", + action="store_true", + dest="readpass", + default=False, + help="read password (for key or password auth) from stdin", + ) + parser.add_option( + "-r", + "--remote", + action="store", + type="string", + dest="remote", + default=None, + metavar="host:port", + help="remote host and port to forward to", + ) options, args = parser.parse_args() if len(args) != 1: - parser.error('Incorrect number of arguments.') + parser.error("Incorrect number of arguments.") if options.remote is None: - parser.error('Remote address required (-r).') - + parser.error("Remote address required (-r).") + g_verbose = options.verbose server_host, server_port = get_host_port(args[0], SSH_PORT) remote_host, remote_port = 
get_host_port(options.remote, SSH_PORT) @@ -156,31 +216,42 @@ def parse_options(): def main(): options, server, remote = parse_options() - + password = None if options.readpass: - password = getpass.getpass('Enter SSH password: ') - + password = getpass.getpass("Enter SSH password: ") + client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.WarningPolicy()) - verbose('Connecting to ssh host %s:%d ...' % (server[0], server[1])) + verbose("Connecting to ssh host %s:%d ..." % (server[0], server[1])) try: - client.connect(server[0], server[1], username=options.user, key_filename=options.keyfile, - look_for_keys=options.look_for_keys, password=password) + client.connect( + server[0], + server[1], + username=options.user, + key_filename=options.keyfile, + look_for_keys=options.look_for_keys, + password=password, + ) except Exception as e: - print('*** Failed to connect to %s:%d: %r' % (server[0], server[1], e)) + print("*** Failed to connect to %s:%d: %r" % (server[0], server[1], e)) sys.exit(1) - verbose('Now forwarding port %d to %s:%d ...' % (options.port, remote[0], remote[1])) + verbose( + "Now forwarding port %d to %s:%d ..." + % (options.port, remote[0], remote[1]) + ) try: - forward_tunnel(options.port, remote[0], remote[1], client.get_transport()) + forward_tunnel( + options.port, remote[0], remote[1], client.get_transport() + ) except KeyboardInterrupt: - print('C-c: Port forwarding stopped.') + print("C-c: Port forwarding stopped.") sys.exit(0) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/demos/interactive.py b/demos/interactive.py index 7138cd6c9..037787c44 100644 --- a/demos/interactive.py +++ b/demos/interactive.py @@ -25,6 +25,7 @@ try: import termios import tty + has_termios = True except ImportError: has_termios = False @@ -39,7 +40,7 @@ def interactive_shell(chan): def posix_shell(chan): import select - + oldtty = termios.tcgetattr(sys.stdin) try: tty.setraw(sys.stdin.fileno()) @@ -52,7 +53,7 @@ def posix_shell(chan): try: x = u(chan.recv(1024)) if len(x) == 0: - sys.stdout.write('\r\n*** EOF\r\n') + sys.stdout.write("\r\n*** EOF\r\n") break sys.stdout.write(x) sys.stdout.flush() @@ -67,26 +68,28 @@ def posix_shell(chan): finally: termios.tcsetattr(sys.stdin, termios.TCSADRAIN, oldtty) - + # thanks to Mike Looijmans for this code def windows_shell(chan): import threading - sys.stdout.write("Line-buffered terminal emulation. Press F6 or ^Z to send EOF.\r\n\r\n") - + sys.stdout.write( + "Line-buffered terminal emulation. Press F6 or ^Z to send EOF.\r\n\r\n" + ) + def writeall(sock): while True: data = sock.recv(256) if not data: - sys.stdout.write('\r\n*** EOF ***\r\n\r\n') + sys.stdout.write("\r\n*** EOF ***\r\n\r\n") sys.stdout.flush() break sys.stdout.write(data) sys.stdout.flush() - + writer = threading.Thread(target=writeall, args=(chan,)) writer.start() - + try: while True: d = sys.stdin.read(1) diff --git a/demos/rforward.py b/demos/rforward.py index ae70670ce..a2e8a7761 100755 --- a/demos/rforward.py +++ b/demos/rforward.py @@ -47,11 +47,13 @@ def handler(chan, host, port): try: sock.connect((host, port)) except Exception as e: - verbose('Forwarding request to %s:%d failed: %r' % (host, port, e)) + verbose("Forwarding request to %s:%d failed: %r" % (host, port, e)) return - - verbose('Connected! Tunnel open %r -> %r -> %r' % (chan.origin_addr, - chan.getpeername(), (host, port))) + + verbose( + "Connected! 
Tunnel open %r -> %r -> %r" + % (chan.origin_addr, chan.getpeername(), (host, port)) + ) while True: r, w, x = select.select([sock, chan], [], []) if sock in r: @@ -66,16 +68,18 @@ def handler(chan, host, port): sock.send(data) chan.close() sock.close() - verbose('Tunnel closed from %r' % (chan.origin_addr,)) + verbose("Tunnel closed from %r" % (chan.origin_addr,)) def reverse_forward_tunnel(server_port, remote_host, remote_port, transport): - transport.request_port_forward('', server_port) + transport.request_port_forward("", server_port) while True: chan = transport.accept(1000) if chan is None: continue - thr = threading.Thread(target=handler, args=(chan, remote_host, remote_port)) + thr = threading.Thread( + target=handler, args=(chan, remote_host, remote_port) + ) thr.setDaemon(True) thr.start() @@ -95,40 +99,88 @@ def verbose(s): def get_host_port(spec, default_port): "parse 'hostname:22' into a host and port, with the port optional" - args = (spec.split(':', 1) + [default_port])[:2] + args = (spec.split(":", 1) + [default_port])[:2] args[1] = int(args[1]) return args[0], args[1] def parse_options(): global g_verbose - - parser = OptionParser(usage='usage: %prog [options] [:]', - version='%prog 1.0', description=HELP) - parser.add_option('-q', '--quiet', action='store_false', dest='verbose', default=True, - help='squelch all informational output') - parser.add_option('-p', '--remote-port', action='store', type='int', dest='port', - default=DEFAULT_PORT, - help='port on server to forward (default: %d)' % DEFAULT_PORT) - parser.add_option('-u', '--user', action='store', type='string', dest='user', - default=getpass.getuser(), - help='username for SSH authentication (default: %s)' % getpass.getuser()) - parser.add_option('-K', '--key', action='store', type='string', dest='keyfile', - default=None, - help='private key file to use for SSH authentication') - parser.add_option('', '--no-key', action='store_false', dest='look_for_keys', default=True, - help='don\'t look for or use a private key file') - parser.add_option('-P', '--password', action='store_true', dest='readpass', default=False, - help='read password (for key or password auth) from stdin') - parser.add_option('-r', '--remote', action='store', type='string', dest='remote', default=None, metavar='host:port', - help='remote host and port to forward to') + + parser = OptionParser( + usage="usage: %prog [options] [:]", + version="%prog 1.0", + description=HELP, + ) + parser.add_option( + "-q", + "--quiet", + action="store_false", + dest="verbose", + default=True, + help="squelch all informational output", + ) + parser.add_option( + "-p", + "--remote-port", + action="store", + type="int", + dest="port", + default=DEFAULT_PORT, + help="port on server to forward (default: %d)" % DEFAULT_PORT, + ) + parser.add_option( + "-u", + "--user", + action="store", + type="string", + dest="user", + default=getpass.getuser(), + help="username for SSH authentication (default: %s)" + % getpass.getuser(), + ) + parser.add_option( + "-K", + "--key", + action="store", + type="string", + dest="keyfile", + default=None, + help="private key file to use for SSH authentication", + ) + parser.add_option( + "", + "--no-key", + action="store_false", + dest="look_for_keys", + default=True, + help="don't look for or use a private key file", + ) + parser.add_option( + "-P", + "--password", + action="store_true", + dest="readpass", + default=False, + help="read password (for key or password auth) from stdin", + ) + parser.add_option( + "-r", + "--remote", + 
action="store", + type="string", + dest="remote", + default=None, + metavar="host:port", + help="remote host and port to forward to", + ) options, args = parser.parse_args() if len(args) != 1: - parser.error('Incorrect number of arguments.') + parser.error("Incorrect number of arguments.") if options.remote is None: - parser.error('Remote address required (-r).') - + parser.error("Remote address required (-r).") + g_verbose = options.verbose server_host, server_port = get_host_port(args[0], SSH_PORT) remote_host, remote_port = get_host_port(options.remote, SSH_PORT) @@ -137,31 +189,42 @@ def parse_options(): def main(): options, server, remote = parse_options() - + password = None if options.readpass: - password = getpass.getpass('Enter SSH password: ') - + password = getpass.getpass("Enter SSH password: ") + client = paramiko.SSHClient() client.load_system_host_keys() client.set_missing_host_key_policy(paramiko.WarningPolicy()) - verbose('Connecting to ssh host %s:%d ...' % (server[0], server[1])) + verbose("Connecting to ssh host %s:%d ..." % (server[0], server[1])) try: - client.connect(server[0], server[1], username=options.user, key_filename=options.keyfile, - look_for_keys=options.look_for_keys, password=password) + client.connect( + server[0], + server[1], + username=options.user, + key_filename=options.keyfile, + look_for_keys=options.look_for_keys, + password=password, + ) except Exception as e: - print('*** Failed to connect to %s:%d: %r' % (server[0], server[1], e)) + print("*** Failed to connect to %s:%d: %r" % (server[0], server[1], e)) sys.exit(1) - verbose('Now forwarding remote port %d to %s:%d ...' % (options.port, remote[0], remote[1])) + verbose( + "Now forwarding remote port %d to %s:%d ..." + % (options.port, remote[0], remote[1]) + ) try: - reverse_forward_tunnel(options.port, remote[0], remote[1], client.get_transport()) + reverse_forward_tunnel( + options.port, remote[0], remote[1], client.get_transport() + ) except KeyboardInterrupt: - print('C-c: Port forwarding stopped.') + print("C-c: Port forwarding stopped.") sys.exit(0) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/dev-requirements.txt b/dev-requirements.txt index 2547fb5fe..3edcc812f 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,11 +1,23 @@ -# Older junk -tox>=1.4,<1.5 -# For newer tasks like building Sphinx docs. -invoke>=0.13,<2.0 -invocations>=0.13,<2.0 -sphinx>=1.1.3,<2.0 -alabaster>=0.7.5,<2.0 -releases>=1.1.0,<2.0 -semantic_version<3.0 -wheel==0.24 -twine==1.5 +# Invocations for common project tasks +invoke==1.6.0 +invocations==2.3.0 +pytest==4.4.2 +pytest-relaxed==1.1.5 +# pytest-xdist for test dir watching and the inv guard task +pytest-xdist==1.28.0 +mock==2.0.0 +# Linting! +flake8==3.8.3 +# Formatting! +black==18.6b4 +# Coverage! +coverage==4.5.4 +codecov==2.1.11 +# Documentation tools +sphinx>=1.4,<1.7 +alabaster==0.7.12 +releases>=1.5,<2.0 +# Release tools +semantic_version>=2.4,<2.5 +# Self +-e ".[ed25519,invoke]" diff --git a/paramiko/__init__.py b/paramiko/__init__.py index 9e2ba013c..5318cc9c5 100644 --- a/paramiko/__init__.py +++ b/paramiko/__init__.py @@ -16,29 +16,42 @@ # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. 
+# flake8: noqa import sys from paramiko._version import __version__, __version_info__ - -if sys.version_info < (2, 6): - raise RuntimeError('You need Python 2.6+ for this module.') - - -__author__ = "Jeff Forcier " -__license__ = "GNU Lesser General Public License (LGPL)" - - from paramiko.transport import SecurityOptions, Transport -from paramiko.client import SSHClient, MissingHostKeyPolicy, AutoAddPolicy, RejectPolicy, WarningPolicy +from paramiko.client import ( + SSHClient, + MissingHostKeyPolicy, + AutoAddPolicy, + RejectPolicy, + WarningPolicy, +) from paramiko.auth_handler import AuthHandler -from paramiko.ssh_gss import GSSAuth, GSS_AUTH_AVAILABLE -from paramiko.channel import Channel, ChannelFile -from paramiko.ssh_exception import SSHException, PasswordRequiredException, \ - BadAuthenticationType, ChannelException, BadHostKeyException, \ - AuthenticationException, ProxyCommandFailure +from paramiko.ssh_gss import GSSAuth, GSS_AUTH_AVAILABLE, GSS_EXCEPTIONS +from paramiko.channel import ( + Channel, + ChannelFile, + ChannelStderrFile, + ChannelStdinFile, +) +from paramiko.ssh_exception import ( + AuthenticationException, + BadAuthenticationType, + BadHostKeyException, + ChannelException, + ConfigParseError, + CouldNotCanonicalize, + IncompatiblePeer, + PasswordRequiredException, + ProxyCommandFailure, + SSHException, +) from paramiko.server import ServerInterface, SubsystemHandler, InteractiveQuery from paramiko.rsakey import RSAKey from paramiko.dsskey import DSSKey from paramiko.ecdsakey import ECDSAKey +from paramiko.ed25519key import Ed25519Key from paramiko.sftp import SFTPError, BaseSFTP from paramiko.sftp_client import SFTP, SFTPClient from paramiko.sftp_server import SFTPServer @@ -50,54 +63,81 @@ from paramiko.packet import Packetizer from paramiko.file import BufferedFile from paramiko.agent import Agent, AgentKey -from paramiko.pkey import PKey +from paramiko.pkey import PKey, PublicBlob from paramiko.hostkeys import HostKeys -from paramiko.config import SSHConfig +from paramiko.config import SSHConfig, SSHConfigDict from paramiko.proxy import ProxyCommand -from paramiko.common import AUTH_SUCCESSFUL, AUTH_PARTIALLY_SUCCESSFUL, AUTH_FAILED, \ - OPEN_SUCCEEDED, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, OPEN_FAILED_CONNECT_FAILED, \ - OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, OPEN_FAILED_RESOURCE_SHORTAGE +from paramiko.common import ( + AUTH_SUCCESSFUL, + AUTH_PARTIALLY_SUCCESSFUL, + AUTH_FAILED, + OPEN_SUCCEEDED, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_FAILED_CONNECT_FAILED, + OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, + OPEN_FAILED_RESOURCE_SHORTAGE, +) -from paramiko.sftp import SFTP_OK, SFTP_EOF, SFTP_NO_SUCH_FILE, SFTP_PERMISSION_DENIED, SFTP_FAILURE, \ - SFTP_BAD_MESSAGE, SFTP_NO_CONNECTION, SFTP_CONNECTION_LOST, SFTP_OP_UNSUPPORTED +from paramiko.sftp import ( + SFTP_OK, + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, + SFTP_FAILURE, + SFTP_BAD_MESSAGE, + SFTP_NO_CONNECTION, + SFTP_CONNECTION_LOST, + SFTP_OP_UNSUPPORTED, +) from paramiko.common import io_sleep -__all__ = [ 'Transport', - 'SSHClient', - 'MissingHostKeyPolicy', - 'AutoAddPolicy', - 'RejectPolicy', - 'WarningPolicy', - 'SecurityOptions', - 'SubsystemHandler', - 'Channel', - 'PKey', - 'RSAKey', - 'DSSKey', - 'Message', - 'SSHException', - 'AuthenticationException', - 'PasswordRequiredException', - 'BadAuthenticationType', - 'ChannelException', - 'BadHostKeyException', - 'ProxyCommand', - 'ProxyCommandFailure', - 'SFTP', - 'SFTPFile', - 'SFTPHandle', - 'SFTPClient', - 'SFTPServer', - 'SFTPError', 
- 'SFTPAttributes', - 'SFTPServerInterface', - 'ServerInterface', - 'BufferedFile', - 'Agent', - 'AgentKey', - 'HostKeys', - 'SSHConfig', - 'util', - 'io_sleep' ] + +__author__ = "Jeff Forcier " +__license__ = "GNU Lesser General Public License (LGPL)" + +__all__ = [ + "Agent", + "AgentKey", + "AuthenticationException", + "AutoAddPolicy", + "BadAuthenticationType", + "BadHostKeyException", + "BufferedFile", + "Channel", + "ChannelException", + "ConfigParseError", + "CouldNotCanonicalize", + "DSSKey", + "ECDSAKey", + "Ed25519Key", + "HostKeys", + "Message", + "MissingHostKeyPolicy", + "PKey", + "PasswordRequiredException", + "ProxyCommand", + "ProxyCommandFailure", + "RSAKey", + "RejectPolicy", + "SFTP", + "SFTPAttributes", + "SFTPClient", + "SFTPError", + "SFTPFile", + "SFTPHandle", + "SFTPServer", + "SFTPServerInterface", + "SSHClient", + "SSHConfig", + "SSHConfigDict", + "SSHException", + "SecurityOptions", + "ServerInterface", + "SubsystemHandler", + "Transport", + "WarningPolicy", + "io_sleep", + "util", +] diff --git a/paramiko/_version.py b/paramiko/_version.py index c07c0b075..e955fe734 100644 --- a/paramiko/_version.py +++ b/paramiko/_version.py @@ -1,2 +1,2 @@ -__version_info__ = (2, 0, 2) -__version__ = '.'.join(map(str, __version_info__)) +__version_info__ = (2, 10, 1) +__version__ = ".".join(map(str, __version_info__)) diff --git a/paramiko/_winapi.py b/paramiko/_winapi.py index 94bf6017e..df02b1143 100644 --- a/paramiko/_winapi.py +++ b/paramiko/_winapi.py @@ -15,6 +15,7 @@ ###################### # jaraco.windows.error + def format_system_message(errno): """ Call FormatMessage with a system error number to retrieve @@ -41,7 +42,7 @@ def format_system_message(errno): ctypes.byref(result_buffer), buffer_size, arguments, - ) + ) # note the following will cause an infinite loop if GetLastError # repeatedly returns an error that cannot be formatted, although # this should not happen. 
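The reorganized paramiko/__init__.py above expands the public API surface: Ed25519Key, SSHConfigDict, PublicBlob, ChannelStderrFile/ChannelStdinFile, and the new config/canonicalization exceptions are now exported at the top level. A minimal usage sketch of a couple of those exports, assuming an Ed25519 private key at an illustrative path and a reachable host (both placeholders, not part of this patch):

    import os
    import paramiko

    # Load a key via the newly exported Ed25519Key class.
    key_path = os.path.expanduser("~/.ssh/id_ed25519")  # illustrative path
    key = paramiko.Ed25519Key.from_private_key_file(key_path)

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.WarningPolicy())
    client.connect("example.com", port=22, username="user", pkey=key)
    stdin, stdout, stderr = client.exec_command("uptime")
    print(stdout.read().decode())
    client.close()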
@@ -52,13 +53,14 @@ def format_system_message(errno): class WindowsError(builtins.WindowsError): - "more info about errors at http://msdn.microsoft.com/en-us/library/ms681381(VS.85).aspx" + """more info about errors at + http://msdn.microsoft.com/en-us/library/ms681381(VS.85).aspx""" def __init__(self, value=None): if value is None: value = ctypes.windll.kernel32.GetLastError() strerror = format_system_message(value) - if sys.version_info > (3,3): + if sys.version_info > (3, 3): args = 0, strerror, None, value else: args = value, strerror @@ -76,7 +78,8 @@ def __str__(self): return self.message def __repr__(self): - return '{self.__class__.__name__}({self.winerror})'.format(**vars()) + return "{self.__class__.__name__}({self.winerror})".format(**vars()) + def handle_nonzero_success(result): if result == 0: @@ -89,19 +92,19 @@ def handle_nonzero_success(result): GMEM_MOVEABLE = 0x2 GlobalAlloc = ctypes.windll.kernel32.GlobalAlloc -GlobalAlloc.argtypes = ctypes.wintypes.UINT, ctypes.c_ssize_t +GlobalAlloc.argtypes = ctypes.wintypes.UINT, ctypes.c_size_t GlobalAlloc.restype = ctypes.wintypes.HANDLE GlobalLock = ctypes.windll.kernel32.GlobalLock -GlobalLock.argtypes = ctypes.wintypes.HGLOBAL, +GlobalLock.argtypes = (ctypes.wintypes.HGLOBAL,) GlobalLock.restype = ctypes.wintypes.LPVOID GlobalUnlock = ctypes.windll.kernel32.GlobalUnlock -GlobalUnlock.argtypes = ctypes.wintypes.HGLOBAL, +GlobalUnlock.argtypes = (ctypes.wintypes.HGLOBAL,) GlobalUnlock.restype = ctypes.wintypes.BOOL GlobalSize = ctypes.windll.kernel32.GlobalSize -GlobalSize.argtypes = ctypes.wintypes.HGLOBAL, +GlobalSize.argtypes = (ctypes.wintypes.HGLOBAL,) GlobalSize.restype = ctypes.c_size_t CreateFileMapping = ctypes.windll.kernel32.CreateFileMappingW @@ -119,24 +122,22 @@ def handle_nonzero_success(result): MapViewOfFile.restype = ctypes.wintypes.HANDLE UnmapViewOfFile = ctypes.windll.kernel32.UnmapViewOfFile -UnmapViewOfFile.argtypes = ctypes.wintypes.HANDLE, +UnmapViewOfFile.argtypes = (ctypes.wintypes.HANDLE,) RtlMoveMemory = ctypes.windll.kernel32.RtlMoveMemory -RtlMoveMemory.argtypes = ( - ctypes.c_void_p, - ctypes.c_void_p, - ctypes.c_size_t, -) +RtlMoveMemory.argtypes = (ctypes.c_void_p, ctypes.c_void_p, ctypes.c_size_t) -ctypes.windll.kernel32.LocalFree.argtypes = ctypes.wintypes.HLOCAL, +ctypes.windll.kernel32.LocalFree.argtypes = (ctypes.wintypes.HLOCAL,) ##################### # jaraco.windows.mmap + class MemoryMap(object): """ A memory map object which can have security attributes overridden. 
""" + def __init__(self, name, length, security_attributes=None): self.name = name self.length = length @@ -146,14 +147,20 @@ def __init__(self, name, length, security_attributes=None): def __enter__(self): p_SA = ( ctypes.byref(self.security_attributes) - if self.security_attributes else None + if self.security_attributes + else None ) INVALID_HANDLE_VALUE = -1 PAGE_READWRITE = 0x4 FILE_MAP_WRITE = 0x2 filemap = ctypes.windll.kernel32.CreateFileMappingW( - INVALID_HANDLE_VALUE, p_SA, PAGE_READWRITE, 0, self.length, - u(self.name)) + INVALID_HANDLE_VALUE, + p_SA, + PAGE_READWRITE, + 0, + self.length, + u(self.name), + ) handle_nonzero_success(filemap) if filemap == INVALID_HANDLE_VALUE: raise Exception("Failed to create file mapping") @@ -189,6 +196,7 @@ def __exit__(self, exc_type, exc_val, tb): ctypes.windll.kernel32.UnmapViewOfFile(self.view) ctypes.windll.kernel32.CloseHandle(self.filemap) + ############################# # jaraco.windows.api.security @@ -216,53 +224,60 @@ def __exit__(self, exc_type, exc_val, tb): POLICY_NOTIFICATION = 0x00001000 POLICY_ALL_ACCESS = ( - STANDARD_RIGHTS_REQUIRED | - POLICY_VIEW_LOCAL_INFORMATION | - POLICY_VIEW_AUDIT_INFORMATION | - POLICY_GET_PRIVATE_INFORMATION | - POLICY_TRUST_ADMIN | - POLICY_CREATE_ACCOUNT | - POLICY_CREATE_SECRET | - POLICY_CREATE_PRIVILEGE | - POLICY_SET_DEFAULT_QUOTA_LIMITS | - POLICY_SET_AUDIT_REQUIREMENTS | - POLICY_AUDIT_LOG_ADMIN | - POLICY_SERVER_ADMIN | - POLICY_LOOKUP_NAMES) + STANDARD_RIGHTS_REQUIRED + | POLICY_VIEW_LOCAL_INFORMATION + | POLICY_VIEW_AUDIT_INFORMATION + | POLICY_GET_PRIVATE_INFORMATION + | POLICY_TRUST_ADMIN + | POLICY_CREATE_ACCOUNT + | POLICY_CREATE_SECRET + | POLICY_CREATE_PRIVILEGE + | POLICY_SET_DEFAULT_QUOTA_LIMITS + | POLICY_SET_AUDIT_REQUIREMENTS + | POLICY_AUDIT_LOG_ADMIN + | POLICY_SERVER_ADMIN + | POLICY_LOOKUP_NAMES +) POLICY_READ = ( - STANDARD_RIGHTS_READ | - POLICY_VIEW_AUDIT_INFORMATION | - POLICY_GET_PRIVATE_INFORMATION) + STANDARD_RIGHTS_READ + | POLICY_VIEW_AUDIT_INFORMATION + | POLICY_GET_PRIVATE_INFORMATION +) POLICY_WRITE = ( - STANDARD_RIGHTS_WRITE | - POLICY_TRUST_ADMIN | - POLICY_CREATE_ACCOUNT | - POLICY_CREATE_SECRET | - POLICY_CREATE_PRIVILEGE | - POLICY_SET_DEFAULT_QUOTA_LIMITS | - POLICY_SET_AUDIT_REQUIREMENTS | - POLICY_AUDIT_LOG_ADMIN | - POLICY_SERVER_ADMIN) + STANDARD_RIGHTS_WRITE + | POLICY_TRUST_ADMIN + | POLICY_CREATE_ACCOUNT + | POLICY_CREATE_SECRET + | POLICY_CREATE_PRIVILEGE + | POLICY_SET_DEFAULT_QUOTA_LIMITS + | POLICY_SET_AUDIT_REQUIREMENTS + | POLICY_AUDIT_LOG_ADMIN + | POLICY_SERVER_ADMIN +) POLICY_EXECUTE = ( - STANDARD_RIGHTS_EXECUTE | - POLICY_VIEW_LOCAL_INFORMATION | - POLICY_LOOKUP_NAMES) + STANDARD_RIGHTS_EXECUTE + | POLICY_VIEW_LOCAL_INFORMATION + | POLICY_LOOKUP_NAMES +) + class TokenAccess: TOKEN_QUERY = 0x8 + class TokenInformationClass: TokenUser = 1 + class TOKEN_USER(ctypes.Structure): num = 1 _fields_ = [ - ('SID', ctypes.c_void_p), - ('ATTRIBUTES', ctypes.wintypes.DWORD), + ("SID", ctypes.c_void_p), + ("ATTRIBUTES", ctypes.wintypes.DWORD), ] @@ -279,19 +294,21 @@ class SECURITY_DESCRIPTOR(ctypes.Structure): PACL Dacl; } SECURITY_DESCRIPTOR; """ + SECURITY_DESCRIPTOR_CONTROL = ctypes.wintypes.USHORT REVISION = 1 _fields_ = [ - ('Revision', ctypes.c_ubyte), - ('Sbz1', ctypes.c_ubyte), - ('Control', SECURITY_DESCRIPTOR_CONTROL), - ('Owner', ctypes.c_void_p), - ('Group', ctypes.c_void_p), - ('Sacl', ctypes.c_void_p), - ('Dacl', ctypes.c_void_p), + ("Revision", ctypes.c_ubyte), + ("Sbz1", ctypes.c_ubyte), + ("Control", SECURITY_DESCRIPTOR_CONTROL), + 
("Owner", ctypes.c_void_p), + ("Group", ctypes.c_void_p), + ("Sacl", ctypes.c_void_p), + ("Dacl", ctypes.c_void_p), ] + class SECURITY_ATTRIBUTES(ctypes.Structure): """ typedef struct _SECURITY_ATTRIBUTES { @@ -300,10 +317,11 @@ class SECURITY_ATTRIBUTES(ctypes.Structure): BOOL bInheritHandle; } SECURITY_ATTRIBUTES; """ + _fields_ = [ - ('nLength', ctypes.wintypes.DWORD), - ('lpSecurityDescriptor', ctypes.c_void_p), - ('bInheritHandle', ctypes.wintypes.BOOL), + ("nLength", ctypes.wintypes.DWORD), + ("lpSecurityDescriptor", ctypes.c_void_p), + ("bInheritHandle", ctypes.wintypes.BOOL), ] def __init__(self, *args, **kwargs): @@ -329,37 +347,49 @@ def descriptor(self, value): ######################### # jaraco.windows.security + def GetTokenInformation(token, information_class): """ Given a token, get the token information for it. """ data_size = ctypes.wintypes.DWORD() - ctypes.windll.advapi32.GetTokenInformation(token, information_class.num, - 0, 0, ctypes.byref(data_size)) + ctypes.windll.advapi32.GetTokenInformation( + token, information_class.num, 0, 0, ctypes.byref(data_size) + ) data = ctypes.create_string_buffer(data_size.value) - handle_nonzero_success(ctypes.windll.advapi32.GetTokenInformation(token, - information_class.num, - ctypes.byref(data), ctypes.sizeof(data), - ctypes.byref(data_size))) + handle_nonzero_success( + ctypes.windll.advapi32.GetTokenInformation( + token, + information_class.num, + ctypes.byref(data), + ctypes.sizeof(data), + ctypes.byref(data_size), + ) + ) return ctypes.cast(data, ctypes.POINTER(TOKEN_USER)).contents + def OpenProcessToken(proc_handle, access): result = ctypes.wintypes.HANDLE() proc_handle = ctypes.wintypes.HANDLE(proc_handle) - handle_nonzero_success(ctypes.windll.advapi32.OpenProcessToken( - proc_handle, access, ctypes.byref(result))) + handle_nonzero_success( + ctypes.windll.advapi32.OpenProcessToken( + proc_handle, access, ctypes.byref(result) + ) + ) return result + def get_current_user(): """ Return a TOKEN_USER for the owner of this process. """ process = OpenProcessToken( - ctypes.windll.kernel32.GetCurrentProcess(), - TokenAccess.TOKEN_QUERY, + ctypes.windll.kernel32.GetCurrentProcess(), TokenAccess.TOKEN_QUERY ) return GetTokenInformation(process, TOKEN_USER) + def get_security_attributes_for_user(user=None): """ Return a SECURITY_ATTRIBUTES structure with the SID set to the @@ -377,8 +407,10 @@ def get_security_attributes_for_user(user=None): SA.descriptor = SD SA.bInheritHandle = 1 - ctypes.windll.advapi32.InitializeSecurityDescriptor(ctypes.byref(SD), - SECURITY_DESCRIPTOR.REVISION) - ctypes.windll.advapi32.SetSecurityDescriptorOwner(ctypes.byref(SD), - user.SID, 0) + ctypes.windll.advapi32.InitializeSecurityDescriptor( + ctypes.byref(SD), SECURITY_DESCRIPTOR.REVISION + ) + ctypes.windll.advapi32.SetSecurityDescriptorOwner( + ctypes.byref(SD), user.SID, 0 + ) return SA diff --git a/paramiko/agent.py b/paramiko/agent.py index 6a8e7fb43..1dc99b18a 100644 --- a/paramiko/agent.py +++ b/paramiko/agent.py @@ -42,6 +42,17 @@ cSSH2_AGENTC_SIGN_REQUEST = byte_chr(13) SSH2_AGENT_SIGN_RESPONSE = 14 +SSH_AGENT_RSA_SHA2_256 = 2 +SSH_AGENT_RSA_SHA2_512 = 4 +# NOTE: RFC mildly confusing; while these flags are OR'd together, OpenSSH at +# least really treats them like "AND"s, in the sense that if it finds the +# SHA256 flag set it won't continue looking at the SHA512 one; it +# short-circuits right away. +# Thus, we never want to eg submit 6 to say "either's good". 
+ALGORITHM_FLAG_MAP = { + "rsa-sha2-256": SSH_AGENT_RSA_SHA2_256, + "rsa-sha2-512": SSH_AGENT_RSA_SHA2_512, +} class AgentSSH(object): @@ -65,7 +76,7 @@ def _connect(self, conn): self._conn = conn ptype, result = self._send_message(cSSH2_AGENTC_REQUEST_IDENTITIES) if ptype != SSH2_AGENT_IDENTITIES_ANSWER: - raise SSHException('could not get keys from ssh-agent') + raise SSHException("could not get keys from ssh-agent") keys = [] for i in range(result.get_int()): keys.append(AgentKey(self, result.get_binary())) @@ -80,19 +91,19 @@ def _close(self): def _send_message(self, msg): msg = asbytes(msg) - self._conn.send(struct.pack('>I', len(msg)) + msg) - l = self._read_all(4) - msg = Message(self._read_all(struct.unpack('>I', l)[0])) + self._conn.send(struct.pack(">I", len(msg)) + msg) + data = self._read_all(4) + msg = Message(self._read_all(struct.unpack(">I", data)[0])) return ord(msg.get_byte()), msg def _read_all(self, wanted): result = self._conn.recv(wanted) while len(result) < wanted: if len(result) == 0: - raise SSHException('lost ssh-agent') + raise SSHException("lost ssh-agent") extra = self._conn.recv(wanted - len(result)) if len(extra) == 0: - raise SSHException('lost ssh-agent') + raise SSHException("lost ssh-agent") result += extra return result @@ -101,6 +112,7 @@ class AgentProxyThread(threading.Thread): """ Class in charge of communication between two channels. """ + def __init__(self, agent): threading.Thread.__init__(self, target=self.run) self._agent = agent @@ -109,19 +121,25 @@ def __init__(self, agent): def run(self): try: (r, addr) = self.get_connection() - # Found that r should be either a socket from the socket library or None + # Found that r should be either + # a socket from the socket library or None self.__inr = r - self.__addr = addr # This should be an IP address as a string? or None + # The address should be an IP address as a string? or None + self.__addr = addr self._agent.connect() - if not isinstance(self._agent, int) and (self._agent._conn is None or not hasattr(self._agent._conn, 'fileno')): + if not isinstance(self._agent, int) and ( + self._agent._conn is None + or not hasattr(self._agent._conn, "fileno") + ): raise AuthenticationException("Unable to connect to SSH agent") self._communicate() except: - #XXX Not sure what to do here ... raise or pass ? + # XXX Not sure what to do here ... raise or pass ? raise def _communicate(self): import fcntl + oldflags = fcntl.fcntl(self.__inr, fcntl.F_GETFL) fcntl.fcntl(self.__inr, fcntl.F_SETFL, oldflags | os.O_NONBLOCK) while not self._exit: @@ -154,6 +172,7 @@ class AgentLocalProxy(AgentProxyThread): Class to be used when wanting to ask a local SSH Agent being asked from a remote fake agent (so use a unix socket for ex.) """ + def __init__(self, agent): AgentProxyThread.__init__(self, agent) @@ -177,6 +196,7 @@ class AgentRemoteProxy(AgentProxyThread): """ Class to be used when wanting to ask a remote SSH Agent """ + def __init__(self, agent, chan): AgentProxyThread.__init__(self, agent) self.__chan = chan @@ -185,6 +205,34 @@ def get_connection(self): return self.__chan, None +def get_agent_connection(): + """ + Returns some SSH agent object, or None if none were found/supported. + + .. 
versionadded:: 2.10 + """ + if ("SSH_AUTH_SOCK" in os.environ) and (sys.platform != "win32"): + conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + try: + retry_on_signal(lambda: conn.connect(os.environ["SSH_AUTH_SOCK"])) + return conn + except: + # probably a dangling env var: the ssh agent is gone + return + elif sys.platform == "win32": + from . import win_pageant, win_openssh + + conn = None + if win_pageant.can_talk_to_agent(): + conn = win_pageant.PageantConnection() + elif win_openssh.can_talk_to_agent(): + conn = win_openssh.OpenSSHAgentConnection() + return conn + else: + # no agent support + return + + class AgentClientProxy(object): """ Class proxying request as a client: @@ -197,6 +245,7 @@ class AgentClientProxy(object): the remote fake agent and the local agent #. Communication occurs ... """ + def __init__(self, chanRemote): self._conn = None self.__chanR = chanRemote @@ -210,21 +259,8 @@ def connect(self): """ Method automatically called by ``AgentProxyThread.run``. """ - if ('SSH_AUTH_SOCK' in os.environ) and (sys.platform != 'win32'): - conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - try: - retry_on_signal(lambda: conn.connect(os.environ['SSH_AUTH_SOCK'])) - except: - # probably a dangling env var: the ssh agent is gone - return - elif sys.platform == 'win32': - import paramiko.win_pageant as win_pageant - if win_pageant.can_talk_to_agent(): - conn = win_pageant.PageantConnection() - else: - return - else: - # no agent support + conn = get_agent_connection() + if not conn: return self._conn = conn @@ -244,14 +280,15 @@ class AgentServerProxy(AgentSSH): """ :param .Transport t: Transport used for SSH Agent communication forwarding - :raises SSHException: mostly if we lost the agent + :raises: `.SSHException` -- mostly if we lost the agent """ + def __init__(self, t): AgentSSH.__init__(self) self.__t = t - self._dir = tempfile.mkdtemp('sshproxy') + self._dir = tempfile.mkdtemp("sshproxy") os.chmod(self._dir, stat.S_IRWXU) - self._file = self._dir + '/sshproxy.ssh' + self._file = self._dir + "/sshproxy.ssh" self.thread = AgentLocalProxy(self) self.thread.start() @@ -261,8 +298,8 @@ def __del__(self): def connect(self): conn_sock = self.__t.open_forward_agent_channel() if conn_sock is None: - raise SSHException('lost ssh-agent') - conn_sock.set_name('auth-agent') + raise SSHException("lost ssh-agent") + conn_sock.set_name("auth-agent") self._connect(conn_sock) def close(self): @@ -283,7 +320,7 @@ def get_env(self): :return: a dict containing the ``SSH_AUTH_SOCK`` environnement variables """ - return {'SSH_AUTH_SOCK': self._get_filename()} + return {"SSH_AUTH_SOCK": self._get_filename()} def _get_filename(self): return self._file @@ -310,6 +347,7 @@ class AgentRequestHandler(object): # the remote end. session.exec_command("git clone https://my.git.repository/") """ + def __init__(self, chanClient): self._conn = None self.__chanC = chanClient @@ -331,34 +369,26 @@ class Agent(AgentSSH): """ Client interface for using private keys from an SSH agent running on the local machine. If an SSH agent is running, this class can be used to - connect to it and retreive `.PKey` objects which can be used when + connect to it and retrieve `.PKey` objects which can be used when attempting to authenticate to remote SSH servers. Upon initialization, a session with the local machine's SSH agent is opened, if one is running. If no agent is running, initialization will succeed, but `get_keys` will return an empty tuple. 
- :raises SSHException: + :raises: `.SSHException` -- if an SSH agent is found, but speaks an incompatible protocol + + .. versionchanged:: 2.10 + Added support for native openssh agent on windows (extending previous + putty pageant support) """ + def __init__(self): AgentSSH.__init__(self) - if ('SSH_AUTH_SOCK' in os.environ) and (sys.platform != 'win32'): - conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - try: - conn.connect(os.environ['SSH_AUTH_SOCK']) - except: - # probably a dangling env var: the ssh agent is gone - return - elif sys.platform == 'win32': - from . import win_pageant - if win_pageant.can_talk_to_agent(): - conn = win_pageant.PageantConnection() - else: - return - else: - # no agent support + conn = get_agent_connection() + if not conn: return self._connect(conn) @@ -375,9 +405,11 @@ class AgentKey(PKey): authenticating to a remote server (signing). Most other key operations work as expected. """ + def __init__(self, agent, blob): self.agent = agent self.blob = blob + self.public_blob = None self.name = Message(blob).get_text() def asbytes(self): @@ -389,13 +421,17 @@ def __str__(self): def get_name(self): return self.name - def sign_ssh_data(self, data): + @property + def _fields(self): + raise NotImplementedError + + def sign_ssh_data(self, data, algorithm=None): msg = Message() msg.add_byte(cSSH2_AGENTC_SIGN_REQUEST) msg.add_string(self.blob) msg.add_string(data) - msg.add_int(0) + msg.add_int(ALGORITHM_FLAG_MAP.get(algorithm, 0)) ptype, result = self.agent._send_message(msg) if ptype != SSH2_AGENT_SIGN_RESPONSE: - raise SSHException('key cannot be used for signing') + raise SSHException("key cannot be used for signing") return result.get_binary() diff --git a/paramiko/auth_handler.py b/paramiko/auth_handler.py index 38b237296..41ec44870 100644 --- a/paramiko/auth_handler.py +++ b/paramiko/auth_handler.py @@ -21,30 +21,58 @@ """ import weakref -from paramiko.common import cMSG_SERVICE_REQUEST, cMSG_DISCONNECT, \ - DISCONNECT_SERVICE_NOT_AVAILABLE, DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE, \ - cMSG_USERAUTH_REQUEST, cMSG_SERVICE_ACCEPT, DEBUG, AUTH_SUCCESSFUL, INFO, \ - cMSG_USERAUTH_SUCCESS, cMSG_USERAUTH_FAILURE, AUTH_PARTIALLY_SUCCESSFUL, \ - cMSG_USERAUTH_INFO_REQUEST, WARNING, AUTH_FAILED, cMSG_USERAUTH_PK_OK, \ - cMSG_USERAUTH_INFO_RESPONSE, MSG_SERVICE_REQUEST, MSG_SERVICE_ACCEPT, \ - MSG_USERAUTH_REQUEST, MSG_USERAUTH_SUCCESS, MSG_USERAUTH_FAILURE, \ - MSG_USERAUTH_BANNER, MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE, \ - cMSG_USERAUTH_GSSAPI_RESPONSE, cMSG_USERAUTH_GSSAPI_TOKEN, \ - cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, cMSG_USERAUTH_GSSAPI_ERROR, \ - cMSG_USERAUTH_GSSAPI_ERRTOK, cMSG_USERAUTH_GSSAPI_MIC,\ - MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN, \ - MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, MSG_USERAUTH_GSSAPI_ERROR, \ - MSG_USERAUTH_GSSAPI_ERRTOK, MSG_USERAUTH_GSSAPI_MIC, MSG_NAMES - +import time + +from paramiko.common import ( + cMSG_SERVICE_REQUEST, + cMSG_DISCONNECT, + DISCONNECT_SERVICE_NOT_AVAILABLE, + DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE, + cMSG_USERAUTH_REQUEST, + cMSG_SERVICE_ACCEPT, + DEBUG, + AUTH_SUCCESSFUL, + INFO, + cMSG_USERAUTH_SUCCESS, + cMSG_USERAUTH_FAILURE, + AUTH_PARTIALLY_SUCCESSFUL, + cMSG_USERAUTH_INFO_REQUEST, + WARNING, + AUTH_FAILED, + cMSG_USERAUTH_PK_OK, + cMSG_USERAUTH_INFO_RESPONSE, + MSG_SERVICE_REQUEST, + MSG_SERVICE_ACCEPT, + MSG_USERAUTH_REQUEST, + MSG_USERAUTH_SUCCESS, + MSG_USERAUTH_FAILURE, + MSG_USERAUTH_BANNER, + MSG_USERAUTH_INFO_REQUEST, + MSG_USERAUTH_INFO_RESPONSE, + 
cMSG_USERAUTH_GSSAPI_RESPONSE, + cMSG_USERAUTH_GSSAPI_TOKEN, + cMSG_USERAUTH_GSSAPI_MIC, + MSG_USERAUTH_GSSAPI_RESPONSE, + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_GSSAPI_ERROR, + MSG_USERAUTH_GSSAPI_ERRTOK, + MSG_USERAUTH_GSSAPI_MIC, + MSG_NAMES, + cMSG_USERAUTH_BANNER, +) from paramiko.message import Message -from paramiko.py3compat import bytestring -from paramiko.ssh_exception import SSHException, AuthenticationException, \ - BadAuthenticationType, PartialAuthentication +from paramiko.py3compat import b, u +from paramiko.ssh_exception import ( + SSHException, + AuthenticationException, + BadAuthenticationType, + PartialAuthentication, +) from paramiko.server import InteractiveQuery -from paramiko.ssh_gss import GSSAuth +from paramiko.ssh_gss import GSSAuth, GSS_EXCEPTIONS -class AuthHandler (object): +class AuthHandler(object): """ Internal class to handle the mechanics of authentication. """ @@ -54,7 +82,7 @@ def __init__(self, transport): self.username = None self.authenticated = False self.auth_event = None - self.auth_method = '' + self.auth_method = "" self.banner = None self.password = None self.private_key = None @@ -67,6 +95,9 @@ def __init__(self, transport): self.gss_host = None self.gss_deleg_creds = True + def _log(self, *args): + return self.transport._log(*args) + def is_authenticated(self): return self.authenticated @@ -80,7 +111,7 @@ def auth_none(self, username, event): self.transport.lock.acquire() try: self.auth_event = event - self.auth_method = 'none' + self.auth_method = "none" self.username = username self._request_auth() finally: @@ -90,7 +121,7 @@ def auth_publickey(self, username, key, event): self.transport.lock.acquire() try: self.auth_event = event - self.auth_method = 'publickey' + self.auth_method = "publickey" self.username = username self.private_key = key self._request_auth() @@ -101,21 +132,21 @@ def auth_password(self, username, password, event): self.transport.lock.acquire() try: self.auth_event = event - self.auth_method = 'password' + self.auth_method = "password" self.username = username self.password = password self._request_auth() finally: self.transport.lock.release() - def auth_interactive(self, username, handler, event, submethods=''): + def auth_interactive(self, username, handler, event, submethods=""): """ response_list = handler(title, instructions, prompt_list) """ self.transport.lock.acquire() try: self.auth_event = event - self.auth_method = 'keyboard-interactive' + self.auth_method = "keyboard-interactive" self.username = username self.interactive_handler = handler self.submethods = submethods @@ -127,7 +158,7 @@ def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds, event): self.transport.lock.acquire() try: self.auth_event = event - self.auth_method = 'gssapi-with-mic' + self.auth_method = "gssapi-with-mic" self.username = username self.gss_host = gss_host self.gss_deleg_creds = gss_deleg_creds @@ -139,7 +170,7 @@ def auth_gssapi_keyex(self, username, event): self.transport.lock.acquire() try: self.auth_event = event - self.auth_method = 'gssapi-keyex' + self.auth_method = "gssapi-keyex" self.username = username self._request_auth() finally: @@ -149,20 +180,20 @@ def abort(self): if self.auth_event is not None: self.auth_event.set() - ### internals... + # ...internals... 
def _request_auth(self): m = Message() m.add_byte(cMSG_SERVICE_REQUEST) - m.add_string('ssh-userauth') + m.add_string("ssh-userauth") self.transport._send_message(m) def _disconnect_service_not_available(self): m = Message() m.add_byte(cMSG_DISCONNECT) m.add_int(DISCONNECT_SERVICE_NOT_AVAILABLE) - m.add_string('Service not available') - m.add_string('en') + m.add_string("Service not available") + m.add_string("en") self.transport._send_message(m) self.transport.close() @@ -170,37 +201,56 @@ def _disconnect_no_more_auth(self): m = Message() m.add_byte(cMSG_DISCONNECT) m.add_int(DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE) - m.add_string('No more auth methods available') - m.add_string('en') + m.add_string("No more auth methods available") + m.add_string("en") self.transport._send_message(m) self.transport.close() - def _get_session_blob(self, key, service, username): + def _get_key_type_and_bits(self, key): + """ + Given any key, return its type/algorithm & bits-to-sign. + + Intended for input to or verification of, key signatures. + """ + # Use certificate contents, if available, plain pubkey otherwise + if key.public_blob: + return key.public_blob.key_type, key.public_blob.key_blob + else: + return key.get_name(), key + + def _get_session_blob(self, key, service, username, algorithm): m = Message() m.add_string(self.transport.session_id) m.add_byte(cMSG_USERAUTH_REQUEST) m.add_string(username) m.add_string(service) - m.add_string('publickey') + m.add_string("publickey") m.add_boolean(True) - m.add_string(key.get_name()) - m.add_string(key) + _, bits = self._get_key_type_and_bits(key) + m.add_string(algorithm) + m.add_string(bits) return m.asbytes() def wait_for_response(self, event): + max_ts = None + if self.transport.auth_timeout is not None: + max_ts = time.time() + self.transport.auth_timeout while True: event.wait(0.1) if not self.transport.is_active(): e = self.transport.get_exception() if (e is None) or issubclass(e.__class__, EOFError): - e = AuthenticationException('Authentication failed.') + e = AuthenticationException("Authentication failed.") raise e if event.is_set(): break + if max_ts is not None and max_ts <= time.time(): + raise AuthenticationException("Authentication timeout.") + if not self.is_authenticated(): e = self.transport.get_exception() if e is None: - e = AuthenticationException('Authentication failed.') + e = AuthenticationException("Authentication failed.") # this is horrible. Python Exception isn't yet descended from # object, so type(e) won't work. :( if issubclass(e.__class__, PartialAuthentication): @@ -210,38 +260,122 @@ def wait_for_response(self, event): def _parse_service_request(self, m): service = m.get_text() - if self.transport.server_mode and (service == 'ssh-userauth'): + if self.transport.server_mode and (service == "ssh-userauth"): # accepted m = Message() m.add_byte(cMSG_SERVICE_ACCEPT) m.add_string(service) self.transport._send_message(m) + banner, language = self.transport.server_object.get_banner() + if banner: + m = Message() + m.add_byte(cMSG_USERAUTH_BANNER) + m.add_string(banner) + m.add_string(language) + self.transport._send_message(m) return # dunno this one self._disconnect_service_not_available() + def _generate_key_from_request(self, algorithm, keyblob): + # For use in server mode. 
+ options = self.transport.preferred_pubkeys + if algorithm.replace("-cert-v01@openssh.com", "") not in options: + err = ( + "Auth rejected: pubkey algorithm '{}' unsupported or disabled" + ) + self._log(INFO, err.format(algorithm)) + return None + return self.transport._key_info[algorithm](Message(keyblob)) + + def _finalize_pubkey_algorithm(self, key_type): + # Short-circuit for non-RSA keys + if "rsa" not in key_type: + return key_type + self._log( + DEBUG, + "Finalizing pubkey algorithm for key of type {!r}".format( + key_type + ), + ) + # Only consider RSA algos from our list, lest we agree on another! + my_algos = [x for x in self.transport.preferred_pubkeys if "rsa" in x] + self._log(DEBUG, "Our pubkey algorithm list: {}".format(my_algos)) + # Short-circuit negatively if user disabled all RSA algos (heh) + if not my_algos: + raise SSHException( + "An RSA key was specified, but no RSA pubkey algorithms are configured!" # noqa + ) + # Check for server-sig-algs if supported & sent + server_algo_str = u( + self.transport.server_extensions.get("server-sig-algs", b("")) + ) + pubkey_algo = None + if server_algo_str: + server_algos = server_algo_str.split(",") + self._log( + DEBUG, "Server-side algorithm list: {}".format(server_algos) + ) + # Only use algos from our list that the server likes, in our own + # preference order. (NOTE: purposefully using same style as in + # Transport...expect to refactor later) + agreement = list(filter(server_algos.__contains__, my_algos)) + if agreement: + pubkey_algo = agreement[0] + self._log( + DEBUG, + "Agreed upon {!r} pubkey algorithm".format(pubkey_algo), + ) + else: + self._log(DEBUG, "No common pubkey algorithms exist! Dying.") + # TODO: MAY want to use IncompatiblePeer again here but that's + # technically for initial key exchange, not pubkey auth. + err = "Unable to agree on a pubkey algorithm for signing a {!r} key!" # noqa + raise AuthenticationException(err.format(key_type)) + else: + # Fallback: first one in our (possibly tweaked by caller) list + pubkey_algo = my_algos[0] + msg = "Server did not send a server-sig-algs list; defaulting to our first preferred algo ({!r})" # noqa + self._log(DEBUG, msg.format(pubkey_algo)) + self._log( + DEBUG, + "NOTE: you may use the 'disabled_algorithms' SSHClient/Transport init kwarg to disable that or other algorithms if your server does not support them!", # noqa + ) + self.transport._agreed_pubkey_algorithm = pubkey_algo + return pubkey_algo + def _parse_service_accept(self, m): service = m.get_text() - if service == 'ssh-userauth': - self.transport._log(DEBUG, 'userauth is OK') + if service == "ssh-userauth": + # TODO 3.0: this message sucks ass. change it to something more + # obvious. it always appears to mean "we already authed" but no! it + # just means "we are allowed to TRY authing!" 
+ self._log(DEBUG, "userauth is OK") m = Message() m.add_byte(cMSG_USERAUTH_REQUEST) m.add_string(self.username) - m.add_string('ssh-connection') + m.add_string("ssh-connection") m.add_string(self.auth_method) - if self.auth_method == 'password': + if self.auth_method == "password": m.add_boolean(False) - password = bytestring(self.password) + password = b(self.password) m.add_string(password) - elif self.auth_method == 'publickey': + elif self.auth_method == "publickey": m.add_boolean(True) - m.add_string(self.private_key.get_name()) - m.add_string(self.private_key) - blob = self._get_session_blob(self.private_key, 'ssh-connection', self.username) - sig = self.private_key.sign_ssh_data(blob) + key_type, bits = self._get_key_type_and_bits(self.private_key) + algorithm = self._finalize_pubkey_algorithm(key_type) + m.add_string(algorithm) + m.add_string(bits) + blob = self._get_session_blob( + self.private_key, + "ssh-connection", + self.username, + algorithm, + ) + sig = self.private_key.sign_ssh_data(blob, algorithm) m.add_string(sig) - elif self.auth_method == 'keyboard-interactive': - m.add_string('') + elif self.auth_method == "keyboard-interactive": + m.add_string("") m.add_string(self.submethods) elif self.auth_method == "gssapi-with-mic": sshgss = GSSAuth(self.auth_method, self.gss_deleg_creds) @@ -259,18 +393,28 @@ def _parse_service_accept(self, m): mech = m.get_string() m = Message() m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) - m.add_string(sshgss.ssh_init_sec_context(self.gss_host, - mech, - self.username,)) + try: + m.add_string( + sshgss.ssh_init_sec_context( + self.gss_host, mech, self.username + ) + ) + except GSS_EXCEPTIONS as e: + return self._handle_local_gss_failure(e) self.transport._send_message(m) while True: ptype, m = self.transport.packetizer.read_message() if ptype == MSG_USERAUTH_GSSAPI_TOKEN: srv_token = m.get_string() - next_token = sshgss.ssh_init_sec_context(self.gss_host, - mech, - self.username, - srv_token) + try: + next_token = sshgss.ssh_init_sec_context( + self.gss_host, + mech, + self.username, + srv_token, + ) + except GSS_EXCEPTIONS as e: + return self._handle_local_gss_failure(e) # After this step the GSSAPI should not return any # token. If it does, we keep sending the token to # the server until no more token is returned. @@ -282,7 +426,9 @@ def _parse_service_accept(self, m): m.add_string(next_token) self.transport.send_message(m) else: - raise SSHException("Received Package: %s" % MSG_NAMES[ptype]) + raise SSHException( + "Received Package: {}".format(MSG_NAMES[ptype]) + ) m = Message() m.add_byte(cMSG_USERAUTH_GSSAPI_MIC) # send the MIC to the server @@ -297,42 +443,56 @@ def _parse_service_accept(self, m): maj_status = m.get_int() min_status = m.get_int() err_msg = m.get_string() - lang_tag = m.get_string() # we don't care! 
- raise SSHException("GSS-API Error:\nMajor Status: %s\n\ - Minor Status: %s\ \nError Message:\ - %s\n") % (str(maj_status), - str(min_status), - err_msg) + m.get_string() # Lang tag - discarded + raise SSHException( + """GSS-API Error: +Major Status: {} +Minor Status: {} +Error Message: {} +""".format( + maj_status, min_status, err_msg + ) + ) elif ptype == MSG_USERAUTH_FAILURE: self._parse_userauth_failure(m) return else: - raise SSHException("Received Package: %s" % MSG_NAMES[ptype]) - elif self.auth_method == 'gssapi-keyex' and\ - self.transport.gss_kex_used: + raise SSHException( + "Received Package: {}".format(MSG_NAMES[ptype]) + ) + elif ( + self.auth_method == "gssapi-keyex" + and self.transport.gss_kex_used + ): kexgss = self.transport.kexgss_ctxt kexgss.set_username(self.username) mic_token = kexgss.ssh_get_mic(self.transport.session_id) m.add_string(mic_token) - elif self.auth_method == 'none': + elif self.auth_method == "none": pass else: - raise SSHException('Unknown auth method "%s"' % self.auth_method) + raise SSHException( + 'Unknown auth method "{}"'.format(self.auth_method) + ) self.transport._send_message(m) else: - self.transport._log(DEBUG, 'Service request "%s" accepted (?)' % service) + self._log( + DEBUG, 'Service request "{}" accepted (?)'.format(service) + ) def _send_auth_result(self, username, method, result): # okay, send result m = Message() if result == AUTH_SUCCESSFUL: - self.transport._log(INFO, 'Auth granted (%s).' % method) + self._log(INFO, "Auth granted ({}).".format(method)) m.add_byte(cMSG_USERAUTH_SUCCESS) self.authenticated = True else: - self.transport._log(INFO, 'Auth rejected (%s).' % method) + self._log(INFO, "Auth rejected ({}).".format(method)) m.add_byte(cMSG_USERAUTH_FAILURE) - m.add_string(self.transport.server_object.get_allowed_auths(username)) + m.add_string( + self.transport.server_object.get_allowed_auths(username) + ) if result == AUTH_PARTIALLY_SUCCESSFUL: m.add_boolean(True) else: @@ -362,7 +522,7 @@ def _parse_userauth_request(self, m): # er, uh... what? 
m = Message() m.add_byte(cMSG_USERAUTH_FAILURE) - m.add_string('none') + m.add_string("none") m.add_boolean(False) self.transport._send_message(m) return @@ -372,58 +532,79 @@ def _parse_userauth_request(self, m): username = m.get_text() service = m.get_text() method = m.get_text() - self.transport._log(DEBUG, 'Auth request (type=%s) service=%s, username=%s' % (method, service, username)) - if service != 'ssh-connection': + self._log( + DEBUG, + "Auth request (type={}) service={}, username={}".format( + method, service, username + ), + ) + if service != "ssh-connection": self._disconnect_service_not_available() return - if (self.auth_username is not None) and (self.auth_username != username): - self.transport._log(WARNING, 'Auth rejected because the client attempted to change username in mid-flight') + if (self.auth_username is not None) and ( + self.auth_username != username + ): + self._log( + WARNING, + "Auth rejected because the client attempted to change username in mid-flight", # noqa + ) self._disconnect_no_more_auth() return self.auth_username = username # check if GSS-API authentication is enabled gss_auth = self.transport.server_object.enable_auth_gssapi() - if method == 'none': + if method == "none": result = self.transport.server_object.check_auth_none(username) - elif method == 'password': + elif method == "password": changereq = m.get_boolean() password = m.get_binary() try: - password = password.decode('UTF-8') + password = password.decode("UTF-8") except UnicodeError: # some clients/servers expect non-utf-8 passwords! # in this case, just return the raw byte string. pass if changereq: - # always treated as failure, since we don't support changing passwords, but collect - # the list of valid auth types from the callback anyway - self.transport._log(DEBUG, 'Auth request to change passwords (rejected)') + # always treated as failure, since we don't support changing + # passwords, but collect the list of valid auth types from + # the callback anyway + self._log(DEBUG, "Auth request to change passwords (rejected)") newpassword = m.get_binary() try: - newpassword = newpassword.decode('UTF-8', 'replace') + newpassword = newpassword.decode("UTF-8", "replace") except UnicodeError: pass result = AUTH_FAILED else: - result = self.transport.server_object.check_auth_password(username, password) - elif method == 'publickey': + result = self.transport.server_object.check_auth_password( + username, password + ) + elif method == "publickey": sig_attached = m.get_boolean() - keytype = m.get_text() + # NOTE: server never wants to guess a client's algo, they're + # telling us directly. No need for _finalize_pubkey_algorithm + # anywhere in this flow. + algorithm = m.get_text() keyblob = m.get_binary() try: - key = self.transport._key_info[keytype](Message(keyblob)) + key = self._generate_key_from_request(algorithm, keyblob) except SSHException as e: - self.transport._log(INFO, 'Auth rejected: public key: %s' % str(e)) + self._log(INFO, "Auth rejected: public key: {}".format(str(e))) key = None - except: - self.transport._log(INFO, 'Auth rejected: unsupported or mangled public key') + except Exception as e: + msg = ( + "Auth rejected: unsupported or mangled public key ({}: {})" + ) # noqa + self._log(INFO, msg.format(e.__class__.__name__, e)) key = None if key is None: self._disconnect_no_more_auth() return # first check if this key is okay... 
if not, we can skip the verify - result = self.transport.server_object.check_auth_publickey(username, key) + result = self.transport.server_object.check_auth_publickey( + username, key + ) if result != AUTH_FAILED: # key is okay, verify it if not sig_attached: @@ -431,19 +612,22 @@ def _parse_userauth_request(self, m): # signs anything... send special "ok" message m = Message() m.add_byte(cMSG_USERAUTH_PK_OK) - m.add_string(keytype) + m.add_string(algorithm) m.add_string(keyblob) self.transport._send_message(m) return sig = Message(m.get_binary()) - blob = self._get_session_blob(key, service, username) + blob = self._get_session_blob( + key, service, username, algorithm + ) if not key.verify_ssh_sig(blob, sig): - self.transport._log(INFO, 'Auth rejected: invalid signature') + self._log(INFO, "Auth rejected: invalid signature") result = AUTH_FAILED - elif method == 'keyboard-interactive': - lang = m.get_string() + elif method == "keyboard-interactive": submethods = m.get_string() - result = self.transport.server_object.check_auth_interactive(username, submethods) + result = self.transport.server_object.check_auth_interactive( + username, submethods + ) if isinstance(result, InteractiveQuery): # make interactive query instead of response self._interactive_query(result) @@ -457,65 +641,37 @@ def _parse_userauth_request(self, m): # We can't accept more than one OID, so if the SSH client sends # more than one, disconnect. if mechs > 1: - self.transport._log(INFO, - 'Disconnect: Received more than one GSS-API OID mechanism') + self._log( + INFO, + "Disconnect: Received more than one GSS-API OID mechanism", + ) self._disconnect_no_more_auth() desired_mech = m.get_string() mech_ok = sshgss.ssh_check_mech(desired_mech) # if we don't support the mechanism, disconnect. if not mech_ok: - self.transport._log(INFO, - 'Disconnect: Received an invalid GSS-API OID mechanism') + self._log( + INFO, + "Disconnect: Received an invalid GSS-API OID mechanism", + ) self._disconnect_no_more_auth() # send the Kerberos V5 GSSAPI OID to the client supported_mech = sshgss.ssh_gss_oids("server") # RFC 4462 says we are not required to implement GSS-API error # messages. See section 3.8 in http://www.ietf.org/rfc/rfc4462.txt - while True: - m = Message() - m.add_byte(cMSG_USERAUTH_GSSAPI_RESPONSE) - m.add_bytes(supported_mech) - self.transport._send_message(m) - ptype, m = self.transport.packetizer.read_message() - if ptype == MSG_USERAUTH_GSSAPI_TOKEN: - client_token = m.get_string() - # use the client token as input to establish a secure - # context. - try: - token = sshgss.ssh_accept_sec_context(self.gss_host, - client_token, - username) - except Exception: - result = AUTH_FAILED - self._send_auth_result(username, method, result) - raise - if token is not None: - m = Message() - m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) - m.add_string(token) - self.transport._send_message(m) - else: - result = AUTH_FAILED - self._send_auth_result(username, method, result) - return - # check MIC - ptype, m = self.transport.packetizer.read_message() - if ptype == MSG_USERAUTH_GSSAPI_MIC: - break - mic_token = m.get_string() - try: - sshgss.ssh_check_mic(mic_token, - self.transport.session_id, - username) - except Exception: - result = AUTH_FAILED - self._send_auth_result(username, method, result) - raise - # TODO: Implement client credential saving. - # The OpenSSH server is able to create a TGT with the delegated - # client credentials, but this is not supported by GSS-API. 
- result = AUTH_SUCCESSFUL - self.transport.server_object.check_auth_gssapi_with_mic(username, result) + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_RESPONSE) + m.add_bytes(supported_mech) + self.transport.auth_handler = GssapiWithMicAuthHandler( + self, sshgss + ) + self.transport._expected_packet = ( + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_REQUEST, + MSG_SERVICE_REQUEST, + ) + self.transport._send_message(m) + return elif method == "gssapi-keyex" and gss_auth: mic_token = m.get_string() sshgss = self.transport.kexgss_ctxt @@ -524,22 +680,26 @@ def _parse_userauth_request(self, m): result = AUTH_FAILED self._send_auth_result(username, method, result) try: - sshgss.ssh_check_mic(mic_token, - self.transport.session_id, - self.auth_username) + sshgss.ssh_check_mic( + mic_token, self.transport.session_id, self.auth_username + ) except Exception: result = AUTH_FAILED self._send_auth_result(username, method, result) raise result = AUTH_SUCCESSFUL - self.transport.server_object.check_auth_gssapi_keyex(username, result) + self.transport.server_object.check_auth_gssapi_keyex( + username, result + ) else: result = self.transport.server_object.check_auth_none(username) # okay, send result self._send_auth_result(username, method, result) def _parse_userauth_success(self, m): - self.transport._log(INFO, 'Authentication (%s) successful!' % self.auth_method) + self._log( + INFO, "Authentication ({}) successful!".format(self.auth_method) + ) self.authenticated = True self.transport._auth_trigger() if self.auth_event is not None: @@ -549,15 +709,24 @@ def _parse_userauth_failure(self, m): authlist = m.get_list() partial = m.get_boolean() if partial: - self.transport._log(INFO, 'Authentication continues...') - self.transport._log(DEBUG, 'Methods: ' + str(authlist)) + self._log(INFO, "Authentication continues...") + self._log(DEBUG, "Methods: " + str(authlist)) self.transport.saved_exception = PartialAuthentication(authlist) elif self.auth_method not in authlist: - self.transport._log(DEBUG, 'Authentication type (%s) not permitted.' % self.auth_method) - self.transport._log(DEBUG, 'Allowed methods: ' + str(authlist)) - self.transport.saved_exception = BadAuthenticationType('Bad authentication type', authlist) + for msg in ( + "Authentication type ({}) not permitted.".format( + self.auth_method + ), + "Allowed methods: {}".format(authlist), + ): + self._log(DEBUG, msg) + self.transport.saved_exception = BadAuthenticationType( + "Bad authentication type", authlist + ) else: - self.transport._log(INFO, 'Authentication (%s) failed.' % self.auth_method) + self._log( + INFO, "Authentication ({}) failed.".format(self.auth_method) + ) self.authenticated = False self.username = None if self.auth_event is not None: @@ -566,13 +735,12 @@ def _parse_userauth_failure(self, m): def _parse_userauth_banner(self, m): banner = m.get_string() self.banner = banner - lang = m.get_string() - self.transport._log(INFO, 'Auth banner: %s' % banner) + self._log(INFO, "Auth banner: {}".format(banner)) # who cares. 
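# Illustrative sketch (not part of the patch): on the client side, the failure
# handling above surfaces either as BadAuthenticationType (with .allowed_types
# filled in) or, for multi-step auth, as the list of remaining methods returned
# by the auth call. Hostname and credentials below are placeholders.
import paramiko

t = paramiko.Transport(("ssh.example.com", 22))  # placeholder address
try:
    t.start_client()
    # Returns [] on full success, or the remaining auth types after a partial
    # (two-factor) success; raises on outright failure.
    remaining = t.auth_password("alice", "not-the-real-password")
    print("next factors:", remaining)
except paramiko.BadAuthenticationType as e:
    # Password auth is not permitted for this user; e.allowed_types lists
    # what the server will accept (e.g. ["publickey"]).
    print("allowed methods:", e.allowed_types)
except paramiko.AuthenticationException:
    print("password rejected")
finally:
    t.close()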
def _parse_userauth_info_request(self, m): - if self.auth_method != 'keyboard-interactive': - raise SSHException('Illegal info request from server') + if self.auth_method != "keyboard-interactive": + raise SSHException("Illegal info request from server") title = m.get_text() instructions = m.get_text() m.get_binary() # lang @@ -580,7 +748,9 @@ def _parse_userauth_info_request(self, m): prompt_list = [] for i in range(prompts): prompt_list.append((m.get_text(), m.get_boolean())) - response_list = self.interactive_handler(title, instructions, prompt_list) + response_list = self.interactive_handler( + title, instructions, prompt_list + ) m = Message() m.add_byte(cMSG_USERAUTH_INFO_RESPONSE) @@ -591,25 +761,169 @@ def _parse_userauth_info_request(self, m): def _parse_userauth_info_response(self, m): if not self.transport.server_mode: - raise SSHException('Illegal info response from server') + raise SSHException("Illegal info response from server") n = m.get_int() responses = [] for i in range(n): responses.append(m.get_text()) - result = self.transport.server_object.check_auth_interactive_response(responses) - if isinstance(type(result), InteractiveQuery): + result = self.transport.server_object.check_auth_interactive_response( + responses + ) + if isinstance(result, InteractiveQuery): # make interactive query instead of response self._interactive_query(result) return - self._send_auth_result(self.auth_username, 'keyboard-interactive', result) - - _handler_table = { + self._send_auth_result( + self.auth_username, "keyboard-interactive", result + ) + + def _handle_local_gss_failure(self, e): + self.transport.saved_exception = e + self._log(DEBUG, "GSSAPI failure: {}".format(e)) + self._log(INFO, "Authentication ({}) failed.".format(self.auth_method)) + self.authenticated = False + self.username = None + if self.auth_event is not None: + self.auth_event.set() + return + + # TODO: do the same to the other tables, in Transport. + # TODO 3.0: MAY make sense to make these tables into actual + # classes/instances that can be fed a mode bool or whatever. Or, + # alternately (both?) make the message types small classes or enums that + # embed this info within themselves (which could also then tidy up the + # current 'integer -> human readable short string' stuff in common.py). + # TODO: if we do that, also expose 'em publicly. + + # Messages which should be handled _by_ servers (sent by clients) + _server_handler_table = { MSG_SERVICE_REQUEST: _parse_service_request, - MSG_SERVICE_ACCEPT: _parse_service_accept, MSG_USERAUTH_REQUEST: _parse_userauth_request, + MSG_USERAUTH_INFO_RESPONSE: _parse_userauth_info_response, + } + + # Messages which should be handled _by_ clients (sent by servers) + _client_handler_table = { + MSG_SERVICE_ACCEPT: _parse_service_accept, MSG_USERAUTH_SUCCESS: _parse_userauth_success, MSG_USERAUTH_FAILURE: _parse_userauth_failure, MSG_USERAUTH_BANNER: _parse_userauth_banner, MSG_USERAUTH_INFO_REQUEST: _parse_userauth_info_request, - MSG_USERAUTH_INFO_RESPONSE: _parse_userauth_info_response, } + + # NOTE: prior to the fix for #1283, this was a static dict instead of a + # property. Should be backwards compatible in most/all cases. 
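# Illustrative sketch (not part of the patch): the same "pick a handler table
# for whichever side we are on" idea, reduced to a stand-alone toy dispatcher.
# All names below are invented for illustration.
class ToyDispatcher(object):
    def _handle_request(self, msg):       # parsed by servers
        return "server handled {}".format(msg)

    def _handle_success(self, msg):       # parsed by clients
        return "client handled {}".format(msg)

    _server_table = {"userauth_request": _handle_request}
    _client_table = {"userauth_success": _handle_success}

    def __init__(self, server_mode):
        self.server_mode = server_mode

    @property
    def _table(self):
        # Selecting the table lazily means messages from the "wrong" side
        # simply have no handler and can be ignored or rejected.
        return self._server_table if self.server_mode else self._client_table

    def dispatch(self, msg_type, msg):
        handler = self._table.get(msg_type)
        return handler(self, msg) if handler is not None else None

print(ToyDispatcher(server_mode=True).dispatch("userauth_request", "hi"))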
+ @property + def _handler_table(self): + if self.transport.server_mode: + return self._server_handler_table + else: + return self._client_handler_table + + +class GssapiWithMicAuthHandler(object): + """A specialized Auth handler for gssapi-with-mic + + During the GSSAPI token exchange we need a modified dispatch table, + because the packet type numbers are not unique. + """ + + method = "gssapi-with-mic" + + def __init__(self, delegate, sshgss): + self._delegate = delegate + self.sshgss = sshgss + + def abort(self): + self._restore_delegate_auth_handler() + return self._delegate.abort() + + @property + def transport(self): + return self._delegate.transport + + @property + def _send_auth_result(self): + return self._delegate._send_auth_result + + @property + def auth_username(self): + return self._delegate.auth_username + + @property + def gss_host(self): + return self._delegate.gss_host + + def _restore_delegate_auth_handler(self): + self.transport.auth_handler = self._delegate + + def _parse_userauth_gssapi_token(self, m): + client_token = m.get_string() + # use the client token as input to establish a secure + # context. + sshgss = self.sshgss + try: + token = sshgss.ssh_accept_sec_context( + self.gss_host, client_token, self.auth_username + ) + except Exception as e: + self.transport.saved_exception = e + result = AUTH_FAILED + self._restore_delegate_auth_handler() + self._send_auth_result(self.auth_username, self.method, result) + raise + if token is not None: + m = Message() + m.add_byte(cMSG_USERAUTH_GSSAPI_TOKEN) + m.add_string(token) + self.transport._expected_packet = ( + MSG_USERAUTH_GSSAPI_TOKEN, + MSG_USERAUTH_GSSAPI_MIC, + MSG_USERAUTH_REQUEST, + ) + self.transport._send_message(m) + + def _parse_userauth_gssapi_mic(self, m): + mic_token = m.get_string() + sshgss = self.sshgss + username = self.auth_username + self._restore_delegate_auth_handler() + try: + sshgss.ssh_check_mic( + mic_token, self.transport.session_id, username + ) + except Exception as e: + self.transport.saved_exception = e + result = AUTH_FAILED + self._send_auth_result(username, self.method, result) + raise + # TODO: Implement client credential saving. + # The OpenSSH server is able to create a TGT with the delegated + # client credentials, but this is not supported by GSS-API. + result = AUTH_SUCCESSFUL + self.transport.server_object.check_auth_gssapi_with_mic( + username, result + ) + # okay, send result + self._send_auth_result(username, self.method, result) + + def _parse_service_request(self, m): + self._restore_delegate_auth_handler() + return self._delegate._parse_service_request(m) + + def _parse_userauth_request(self, m): + self._restore_delegate_auth_handler() + return self._delegate._parse_userauth_request(m) + + __handler_table = { + MSG_SERVICE_REQUEST: _parse_service_request, + MSG_USERAUTH_REQUEST: _parse_userauth_request, + MSG_USERAUTH_GSSAPI_TOKEN: _parse_userauth_gssapi_token, + MSG_USERAUTH_GSSAPI_MIC: _parse_userauth_gssapi_mic, + } + + @property + def _handler_table(self): + # TODO: determine if we can cut this up like we did for the primary + # AuthHandler class. 
+ return self.__handler_table diff --git a/paramiko/ber.py b/paramiko/ber.py index a388df07e..92d7121e1 100644 --- a/paramiko/ber.py +++ b/paramiko/ber.py @@ -21,7 +21,7 @@ import paramiko.util as util -class BERException (Exception): +class BERException(Exception): pass @@ -41,7 +41,7 @@ def __str__(self): return self.asbytes() def __repr__(self): - return 'BER(\'' + repr(self.content) + '\')' + return "BER('" + repr(self.content) + "')" def decode(self): return self.decode_next() @@ -71,12 +71,14 @@ def decode_next(self): t = size & 0x7f if self.idx + t > len(self.content): return None - size = util.inflate_long(self.content[self.idx: self.idx + t], True) + size = util.inflate_long( + self.content[self.idx : self.idx + t], True + ) self.idx += t if self.idx + size > len(self.content): # can't fit return None - data = self.content[self.idx: self.idx + size] + data = self.content[self.idx : self.idx + size] self.idx += size # now switch on id if ident == 0x30: @@ -87,7 +89,8 @@ def decode_next(self): return util.inflate_long(data) else: # 1: boolean (00 false, otherwise true) - raise BERException('Unknown ber encoding type %d (robey is lazy)' % ident) + msg = "Unknown ber encoding type {:d} (robey is lazy)" + raise BERException(msg.format(ident)) @staticmethod def decode_sequence(data): @@ -123,7 +126,9 @@ def encode(self, x): elif (type(x) is list) or (type(x) is tuple): self.encode_tlv(0x30, self.encode_sequence(x)) else: - raise BERException('Unknown type for encoding: %s' % repr(type(x))) + raise BERException( + "Unknown type for encoding: {!r}".format(type(x)) + ) @staticmethod def encode_sequence(data): diff --git a/paramiko/buffered_pipe.py b/paramiko/buffered_pipe.py index 605f51e9e..69445c97c 100644 --- a/paramiko/buffered_pipe.py +++ b/paramiko/buffered_pipe.py @@ -28,34 +28,38 @@ from paramiko.py3compat import PY2, b -class PipeTimeout (IOError): +class PipeTimeout(IOError): """ Indicates that a timeout was reached on a read from a `.BufferedPipe`. """ + pass -class BufferedPipe (object): +class BufferedPipe(object): """ A buffer that obeys normal read (with timeout) & close semantics for a file or socket, but is fed data from another thread. This is used by `.Channel`. """ - + def __init__(self): self._lock = threading.Lock() self._cv = threading.Condition(self._lock) self._event = None - self._buffer = array.array('B') + self._buffer = array.array("B") self._closed = False if PY2: + def _buffer_frombytes(self, data): self._buffer.fromstring(data) def _buffer_tobytes(self, limit=None): return self._buffer[:limit].tostring() + else: + def _buffer_frombytes(self, data): self._buffer.frombytes(data) @@ -67,7 +71,7 @@ def set_event(self, event): Set an event on this buffer. When data is ready to be read (or the buffer has been closed), the event will be set. When no data is ready, the event will be cleared. - + :param threading.Event event: the event to set/clear """ self._lock.acquire() @@ -84,13 +88,13 @@ def set_event(self, event): event.clear() finally: self._lock.release() - + def feed(self, data): """ Feed new data into this pipe. This method is assumed to be called from a separate thread, so synchronization is done. - - :param data: the data to add, as a `str` or `bytes` + + :param data: the data to add, as a ``str`` or ``bytes`` """ self._lock.acquire() try: @@ -106,7 +110,7 @@ def read_ready(self): Returns true if data is buffered and ready to be read from this feeder. 
A ``False`` result does not mean that the feeder has closed; it means you may need to wait before more data arrives. - + :return: ``True`` if a `read` call would immediately return at least one byte; ``False`` otherwise. @@ -134,11 +138,11 @@ def read(self, nbytes, timeout=None): :param int nbytes: maximum number of bytes to read :param float timeout: maximum seconds to wait (or ``None``, the default, to wait forever) - :return: the read data, as a `bytes` - - :raises PipeTimeout: - if a timeout was specified and no data was ready before that - timeout + :return: the read data, as a ``str`` or ``bytes`` + + :raises: + `.PipeTimeout` -- if a timeout was specified and no data was ready + before that timeout """ out = bytes() self._lock.acquire() @@ -172,11 +176,11 @@ def read(self, nbytes, timeout=None): self._lock.release() return out - + def empty(self): """ Clear out the buffer and return all data that was in it. - + :return: any data that was in the buffer prior to clearing it out, as a `str` @@ -190,7 +194,7 @@ def empty(self): return out finally: self._lock.release() - + def close(self): """ Close this pipe object. Future calls to `read` after the buffer @@ -208,7 +212,7 @@ def close(self): def __len__(self): """ Return the number of bytes buffered. - + :return: number (`int`) of bytes buffered """ self._lock.acquire() diff --git a/paramiko/channel.py b/paramiko/channel.py index 3a05bdc47..72f650125 100644 --- a/paramiko/channel.py +++ b/paramiko/channel.py @@ -25,13 +25,23 @@ import socket import time import threading + +# TODO: switch as much of py3compat.py to 'six' as possible, then use six.wraps from functools import wraps from paramiko import util -from paramiko.common import cMSG_CHANNEL_REQUEST, cMSG_CHANNEL_WINDOW_ADJUST, \ - cMSG_CHANNEL_DATA, cMSG_CHANNEL_EXTENDED_DATA, DEBUG, ERROR, \ - cMSG_CHANNEL_SUCCESS, cMSG_CHANNEL_FAILURE, cMSG_CHANNEL_EOF, \ - cMSG_CHANNEL_CLOSE +from paramiko.common import ( + cMSG_CHANNEL_REQUEST, + cMSG_CHANNEL_WINDOW_ADJUST, + cMSG_CHANNEL_DATA, + cMSG_CHANNEL_EXTENDED_DATA, + DEBUG, + ERROR, + cMSG_CHANNEL_SUCCESS, + cMSG_CHANNEL_FAILURE, + cMSG_CHANNEL_EOF, + cMSG_CHANNEL_CLOSE, +) from paramiko.message import Message from paramiko.py3compat import bytes_types from paramiko.ssh_exception import SSHException @@ -45,9 +55,11 @@ def open_only(func): """ Decorator for `.Channel` methods which performs an openness check. - :raises SSHException: - If the wrapped method is called on an unopened `.Channel`. + :raises: + `.SSHException` -- If the wrapped method is called on an unopened + `.Channel`. """ + @wraps(func) def _check(self, *args, **kwds): if ( @@ -56,12 +68,13 @@ def _check(self, *args, **kwds): or self.eof_sent or not self.active ): - raise SSHException('Channel is not open') + raise SSHException("Channel is not open") return func(self, *args, **kwds) + return _check -class Channel (ClosingContextManager): +class Channel(ClosingContextManager): """ A secure tunnel across an SSH `.Transport`. A Channel is meant to behave like a socket, and has an API that should be indistinguishable from the @@ -74,7 +87,7 @@ class Channel (ClosingContextManager): flow-controlled independently.) Similarly, if the server isn't reading data you send, calls to `send` may block, unless you set a timeout. This is exactly like a normal network socket, so it shouldn't be too surprising. - + Instances of this class may be used as context managers. 
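# Usage sketch (not part of the patch): a Channel scoped with a with-block,
# together with the file-like wrappers defined further below. `transport` is
# assumed to be an already-authenticated paramiko.Transport; the command is a
# placeholder.
with transport.open_session() as chan:
    chan.exec_command("wc -l")
    stdin = chan.makefile_stdin("wb")    # ChannelStdinFile, added further down
    stdin.write(b"one\ntwo\nthree\n")
    stdin.close()                        # close() also shuts down the write side
    stdout = chan.makefile("r")
    stderr = chan.makefile_stderr("r")
    print(stdout.read(), stderr.read())
    print("exit status:", chan.recv_exit_status())
# leaving the with-block closes the channel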
""" @@ -114,7 +127,7 @@ def __init__(self, chanid): self.in_window_sofar = 0 self.status_event = threading.Event() self._name = str(chanid) - self.logger = util.get_logger('paramiko.transport') + self.logger = util.get_logger("paramiko.transport") self._pipe = None self.event = threading.Event() self.event_ready = False @@ -132,44 +145,52 @@ def __repr__(self): """ Return a string representation of this object, for debugging. """ - out = ' 0: - out += ' in-buffer=%d' % (len(self.in_buffer),) - out += ' -> ' + repr(self.transport) - out += '>' + out += " in-buffer={}".format(len(self.in_buffer)) + out += " -> " + repr(self.transport) + out += ">" return out @open_only - def get_pty(self, term='vt100', width=80, height=24, width_pixels=0, - height_pixels=0): + def get_pty( + self, + term="vt100", + width=80, + height=24, + width_pixels=0, + height_pixels=0, + ): """ Request a pseudo-terminal from the server. This is usually used right after creating a client channel, to ask the server to provide some basic terminal semantics for a shell invoked with `invoke_shell`. It isn't necessary (or desirable) to call this method if you're going - to exectue a single command with `exec_command`. + to execute a single command with `exec_command`. - :param str term: the terminal type to emulate (for example, ``'vt100'``) + :param str term: the terminal type to emulate + (for example, ``'vt100'``) :param int width: width (in characters) of the terminal screen :param int height: height (in characters) of the terminal screen :param int width_pixels: width (in pixels) of the terminal screen :param int height_pixels: height (in pixels) of the terminal screen - :raises SSHException: - if the request was rejected or the channel was closed + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed """ m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) - m.add_string('pty-req') + m.add_string("pty-req") m.add_boolean(True) m.add_string(term) m.add_int(width) @@ -195,13 +216,14 @@ def invoke_shell(self): When the shell exits, the channel will be closed and can't be reused. You must open a new channel if you wish to open another shell. - :raises SSHException: if the request was rejected or the channel was + :raises: + `.SSHException` -- if the request was rejected or the channel was closed """ m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) - m.add_string('shell') + m.add_string("shell") m.add_boolean(True) self._event_pending() self.transport._send_user_message(m) @@ -220,13 +242,14 @@ def exec_command(self, command): :param str command: a shell command to execute. - :raises SSHException: if the request was rejected or the channel was + :raises: + `.SSHException` -- if the request was rejected or the channel was closed """ m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) - m.add_string('exec') + m.add_string("exec") m.add_boolean(True) m.add_string(command) self._event_pending() @@ -245,13 +268,14 @@ def invoke_subsystem(self, subsystem): :param str subsystem: name of the subsystem being requested. 
- :raises SSHException: - if the request was rejected or the channel was closed + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed """ m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) - m.add_string('subsystem') + m.add_string("subsystem") m.add_boolean(True) m.add_string(subsystem) self._event_pending() @@ -269,13 +293,14 @@ def resize_pty(self, width=80, height=24, width_pixels=0, height_pixels=0): :param int width_pixels: new width (in pixels) of the terminal screen :param int height_pixels: new height (in pixels) of the terminal screen - :raises SSHException: - if the request was rejected or the channel was closed + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed """ m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) - m.add_string('window-change') + m.add_string("window-change") m.add_boolean(False) m.add_int(width) m.add_int(height) @@ -283,6 +308,59 @@ def resize_pty(self, width=80, height=24, width_pixels=0, height_pixels=0): m.add_int(height_pixels) self.transport._send_user_message(m) + @open_only + def update_environment(self, environment): + """ + Updates this channel's remote shell environment. + + .. note:: + This operation is additive - i.e. the current environment is not + reset before the given environment variables are set. + + .. warning:: + Servers may silently reject some environment variables; see the + warning in `set_environment_variable` for details. + + :param dict environment: + a dictionary containing the name and respective values to set + :raises: + `.SSHException` -- if any of the environment variables was rejected + by the server or the channel was closed + """ + for name, value in environment.items(): + try: + self.set_environment_variable(name, value) + except SSHException as e: + err = 'Failed to set environment variable "{}".' + raise SSHException(err.format(name), e) + + @open_only + def set_environment_variable(self, name, value): + """ + Set the value of an environment variable. + + .. warning:: + The server may reject this request depending on its ``AcceptEnv`` + setting; such rejections will fail silently (which is common client + practice for this particular request type). Make sure you + understand your server's configuration before using! + + :param str name: name of the environment variable + :param str value: value of the environment variable + + :raises: + `.SSHException` -- if the request was rejected or the channel was + closed + """ + m = Message() + m.add_byte(cMSG_CHANNEL_REQUEST) + m.add_int(self.remote_chanid) + m.add_string("env") + m.add_boolean(False) + m.add_string(name) + m.add_string(value) + self.transport._send_user_message(m) + def exit_status_ready(self): """ Return true if the remote process has exited and returned an exit @@ -311,11 +389,11 @@ def recv_exit_status(self): `.Transport` or session's ``window_size`` (e.g. that set by the ``default_window_size`` kwarg for `.Transport.__init__`) will cause `.recv_exit_status` to hang indefinitely if it is called prior to a - sufficiently large `~Channel..read` (or if there are no threads - calling `~Channel.read` in the background). + sufficiently large `.Channel.recv` (or if there are no threads + calling `.Channel.recv` in the background). In these cases, ensuring that `.recv_exit_status` is called *after* - `~Channel.read` (or, again, using threads) can avoid the hang. + `.Channel.recv` (or, again, using threads) can avoid the hang. 
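# Usage sketch (not part of the patch) of the caveat above: drain the channel's
# output before asking for the exit status so the window keeps moving.
# `client` is assumed to be a connected paramiko.SSHClient; the command is a
# placeholder.
stdin, stdout, stderr = client.exec_command("some-long-running-command")
output = stdout.read()                        # read stdout first
status = stdout.channel.recv_exit_status()    # now safe from a window deadlock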
:return: the exit code (as an `int`) of the process on the server. @@ -341,14 +419,20 @@ def send_exit_status(self, status): m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) - m.add_string('exit-status') + m.add_string("exit-status") m.add_boolean(False) m.add_int(status) self.transport._send_user_message(m) @open_only - def request_x11(self, screen_number=0, auth_protocol=None, auth_cookie=None, - single_connection=False, handler=None): + def request_x11( + self, + screen_number=0, + auth_protocol=None, + auth_cookie=None, + single_connection=False, + handler=None, + ): """ Request an x11 session on this channel. If the server allows it, further x11 requests can be made from the server to the client, @@ -364,7 +448,7 @@ def request_x11(self, screen_number=0, auth_protocol=None, auth_cookie=None, generated, used, and returned. You will need to use this value to verify incoming x11 requests and replace them with the actual local x11 cookie (which requires some knowledge of the x11 protocol). - + If a handler is passed in, the handler is called from another thread whenever a new x11 connection arrives. The default handler queues up incoming x11 connections, which may be retrieved using @@ -383,19 +467,19 @@ def request_x11(self, screen_number=0, auth_protocol=None, auth_cookie=None, if True, only a single x11 connection will be forwarded (by default, any number of x11 connections can arrive over this session) - :param function handler: - an optional handler to use for incoming X11 connections + :param handler: + an optional callable handler to use for incoming X11 connections :return: the auth_cookie used """ if auth_protocol is None: - auth_protocol = 'MIT-MAGIC-COOKIE-1' + auth_protocol = "MIT-MAGIC-COOKIE-1" if auth_cookie is None: auth_cookie = binascii.hexlify(os.urandom(16)) m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) - m.add_string('x11-req') + m.add_string("x11-req") m.add_boolean(True) m.add_boolean(single_connection) m.add_string(auth_protocol) @@ -413,17 +497,19 @@ def request_forward_agent(self, handler): Request for a forward SSH Agent on this channel. This is only valid for an ssh-agent from OpenSSH !!! - :param function handler: - a required handler to use for incoming SSH Agent connections + :param handler: + a required callable handler to use for incoming SSH Agent + connections - :return: True if we are ok, else False (at that time we always return ok) + :return: True if we are ok, else False + (at that time we always return ok) :raises: SSHException in case of channel problem. """ m = Message() m.add_byte(cMSG_CHANNEL_REQUEST) m.add_int(self.remote_chanid) - m.add_string('auth-agent-req@openssh.com') + m.add_string("auth-agent-req@openssh.com") m.add_boolean(False) self.transport._send_user_message(m) self.transport._set_forward_agent_handler(handler) @@ -497,16 +583,16 @@ def set_combine_stderr(self, combine): self._feed(data) return old - ### socket API + # ...socket API... def settimeout(self, timeout): """ Set a timeout on blocking read/write operations. The ``timeout`` - argument can be a nonnegative float expressing seconds, or ``None``. If - a float is given, subsequent channel read/write operations will raise - a timeout exception if the timeout period value has elapsed before the - operation has completed. Setting a timeout of ``None`` disables - timeouts on socket operations. + argument can be a nonnegative float expressing seconds, or ``None``. 
+ If a float is given, subsequent channel read/write operations will + raise a timeout exception if the timeout period value has elapsed + before the operation has completed. Setting a timeout of ``None`` + disables timeouts on socket operations. ``chan.settimeout(0.0)`` is equivalent to ``chan.setblocking(0)``; ``chan.settimeout(None)`` is equivalent to ``chan.setblocking(1)``. @@ -600,11 +686,11 @@ def recv(self, nbytes): """ Receive data from the channel. The return value is a string representing the data received. The maximum amount of data to be - received at once is specified by ``nbytes``. If a string of length zero - is returned, the channel stream has closed. + received at once is specified by ``nbytes``. If a string of + length zero is returned, the channel stream has closed. :param int nbytes: maximum number of bytes to read. - :return: received data, as a `bytes` + :return: received data, as a ``str``/``bytes``. :raises socket.timeout: if no data is ready before the timeout set by `settimeout`. @@ -738,7 +824,6 @@ def send_stderr(self, s): m.add_int(1) return self._send(s, m) - def sendall(self, s): """ Send data to the channel, without allowing partial results. Unlike @@ -751,7 +836,7 @@ def sendall(self, s): if sending stalled for longer than the timeout set by `settimeout`. :raises socket.error: if an error occurred before the entire string was sent. - + .. note:: If the channel is closed while only part of the data has been sent, there is no way to determine how much data (if any) was sent. @@ -775,7 +860,7 @@ def sendall_stderr(self, s): if sending stalled for longer than the timeout set by `settimeout`. :raises socket.error: if an error occurred before the entire string was sent. - + .. versionadded:: 1.1 """ while s: @@ -804,12 +889,30 @@ def makefile_stderr(self, *params): client, it only makes sense to open this file for reading. For a server, it only makes sense to open this file for writing. - :return: `.ChannelFile` object which can be used for Python file I/O. + :returns: + `.ChannelStderrFile` object which can be used for Python file I/O. .. versionadded:: 1.1 """ return ChannelStderrFile(*([self] + list(params))) + def makefile_stdin(self, *params): + """ + Return a file-like object associated with this channel's stdin + stream. + + The optional ``mode`` and ``bufsize`` arguments are interpreted the + same way as by the built-in ``file()`` function in Python. For a + client, it only makes sense to open this file for writing. For a + server, it only makes sense to open this file for reading. + + :returns: + `.ChannelStdinFile` object which can be used for Python file I/O. + + .. versionadded:: 2.6 + """ + return ChannelStdinFile(*([self] + list(params))) + def fileno(self): """ Returns an OS-level file descriptor which can be used for polling, but @@ -893,7 +996,7 @@ def _closed(self): # attribute instead of a semipublic .closed attribute. 
return self.closed - ### calls from Transport + # ...calls from Transport def _set_transport(self, transport): self.transport = transport @@ -902,21 +1005,25 @@ def _set_transport(self, transport): def _set_window(self, window_size, max_packet_size): self.in_window_size = window_size self.in_max_packet_size = max_packet_size - # threshold of bytes we receive before we bother to send a window update + # threshold of bytes we receive before we bother to send + # a window update self.in_window_threshold = window_size // 10 self.in_window_sofar = 0 - self._log(DEBUG, 'Max packet in: %d bytes' % max_packet_size) + self._log(DEBUG, "Max packet in: {} bytes".format(max_packet_size)) def _set_remote_channel(self, chanid, window_size, max_packet_size): self.remote_chanid = chanid self.out_window_size = window_size - self.out_max_packet_size = self.transport. \ - _sanitize_packet_size(max_packet_size) + self.out_max_packet_size = self.transport._sanitize_packet_size( + max_packet_size + ) self.active = 1 - self._log(DEBUG, 'Max packet out: %d bytes' % self.out_max_packet_size) + self._log( + DEBUG, "Max packet out: {} bytes".format(self.out_max_packet_size) + ) def _request_success(self, m): - self._log(DEBUG, 'Sesch channel %d request ok' % self.chanid) + self._log(DEBUG, "Sesch channel {} request ok".format(self.chanid)) self.event_ready = True self.event.set() return @@ -943,7 +1050,9 @@ def _feed_extended(self, m): code = m.get_int() s = m.get_binary() if code != 1: - self._log(ERROR, 'unknown extended_data type %d; discarding' % code) + self._log( + ERROR, "unknown extended_data type {}; discarding".format(code) + ) return if self.combine_stderr: self._feed(s) @@ -955,7 +1064,7 @@ def _window_adjust(self, m): self.lock.acquire() try: if self.ultra_debug: - self._log(DEBUG, 'window up %d' % nbytes) + self._log(DEBUG, "window up {}".format(nbytes)) self.out_window_size += nbytes self.out_buffer_cv.notifyAll() finally: @@ -966,14 +1075,14 @@ def _handle_request(self, m): want_reply = m.get_boolean() server = self.transport.server_object ok = False - if key == 'exit-status': + if key == "exit-status": self.exit_status = m.get_int() self.status_event.set() ok = True - elif key == 'xon-xoff': + elif key == "xon-xoff": # ignore ok = True - elif key == 'pty-req': + elif key == "pty-req": term = m.get_string() width = m.get_int() height = m.get_int() @@ -983,33 +1092,34 @@ def _handle_request(self, m): if server is None: ok = False else: - ok = server.check_channel_pty_request(self, term, width, height, pixelwidth, - pixelheight, modes) - elif key == 'shell': + ok = server.check_channel_pty_request( + self, term, width, height, pixelwidth, pixelheight, modes + ) + elif key == "shell": if server is None: ok = False else: ok = server.check_channel_shell_request(self) - elif key == 'env': + elif key == "env": name = m.get_string() value = m.get_string() if server is None: ok = False else: ok = server.check_channel_env_request(self, name, value) - elif key == 'exec': + elif key == "exec": cmd = m.get_string() if server is None: ok = False else: ok = server.check_channel_exec_request(self, cmd) - elif key == 'subsystem': + elif key == "subsystem": name = m.get_text() if server is None: ok = False else: ok = server.check_channel_subsystem_request(self, name) - elif key == 'window-change': + elif key == "window-change": width = m.get_int() height = m.get_int() pixelwidth = m.get_int() @@ -1017,9 +1127,10 @@ def _handle_request(self, m): if server is None: ok = False else: - ok = 
server.check_channel_window_change_request(self, width, height, pixelwidth, - pixelheight) - elif key == 'x11-req': + ok = server.check_channel_window_change_request( + self, width, height, pixelwidth, pixelheight + ) + elif key == "x11-req": single_connection = m.get_boolean() auth_proto = m.get_text() auth_cookie = m.get_binary() @@ -1027,15 +1138,20 @@ def _handle_request(self, m): if server is None: ok = False else: - ok = server.check_channel_x11_request(self, single_connection, - auth_proto, auth_cookie, screen_number) - elif key == 'auth-agent-req@openssh.com': + ok = server.check_channel_x11_request( + self, + single_connection, + auth_proto, + auth_cookie, + screen_number, + ) + elif key == "auth-agent-req@openssh.com": if server is None: ok = False else: ok = server.check_channel_forward_agent_request(self) else: - self._log(DEBUG, 'Unhandled channel request "%s"' % key) + self._log(DEBUG, 'Unhandled channel request "{}"'.format(key)) ok = False if want_reply: m = Message() @@ -1057,7 +1173,7 @@ def _handle_eof(self, m): self._pipe.set_forever() finally: self.lock.release() - self._log(DEBUG, 'EOF received (%s)', self._name) + self._log(DEBUG, "EOF received ({})".format(self._name)) def _handle_close(self, m): self.lock.acquire() @@ -1070,15 +1186,16 @@ def _handle_close(self, m): if m is not None: self.transport._send_user_message(m) - ### internals... + # ...internals... def _send(self, s, m): size = len(s) self.lock.acquire() try: if self.closed: - # this doesn't seem useful, but it is the documented behavior of Socket - raise socket.error('Socket is closed') + # this doesn't seem useful, but it is the documented behavior + # of Socket + raise socket.error("Socket is closed") size = self._wait_for_send_window(size) if size == 0: # eof or similar @@ -1105,7 +1222,7 @@ def _wait_for_event(self): return e = self.transport.get_exception() if e is None: - e = SSHException('Channel closed.') + e = SSHException("Channel closed.") raise e def _set_closed(self): @@ -1128,7 +1245,7 @@ def _send_eof(self): m.add_byte(cMSG_CHANNEL_EOF) m.add_int(self.remote_chanid) self.eof_sent = True - self._log(DEBUG, 'EOF sent (%s)', self._name) + self._log(DEBUG, "EOF sent ({})".format(self._name)) return m def _close_internal(self): @@ -1145,7 +1262,8 @@ def _close_internal(self): return m1, m2 def _unlink(self): - # server connection could die before we become active: still signal the close! + # server connection could die before we become active: + # still signal the close! if self.closed: return self.lock.acquire() @@ -1161,12 +1279,14 @@ def _check_add_window(self, n): if self.closed or self.eof_received or not self.active: return 0 if self.ultra_debug: - self._log(DEBUG, 'addwindow %d' % n) + self._log(DEBUG, "addwindow {}".format(n)) self.in_window_sofar += n if self.in_window_sofar <= self.in_window_threshold: return 0 if self.ultra_debug: - self._log(DEBUG, 'addwindow send %d' % self.in_window_sofar) + self._log( + DEBUG, "addwindow send {}".format(self.in_window_sofar) + ) out = self.in_window_sofar self.in_window_sofar = 0 return out @@ -1188,7 +1308,8 @@ def _wait_for_send_window(self, size): # should we block? 
if self.timeout == 0.0: raise socket.timeout() - # loop here in case we get woken up but a different thread has filled the buffer + # loop here in case we get woken up but a different thread has + # filled the buffer timeout = self.timeout while self.out_window_size == 0: if self.closed or self.eof_sent: @@ -1208,11 +1329,11 @@ def _wait_for_send_window(self, size): size = self.out_max_packet_size - 64 self.out_window_size -= size if self.ultra_debug: - self._log(DEBUG, 'window down to %d' % self.out_window_size) + self._log(DEBUG, "window down to {}".format(self.out_window_size)) return size -class ChannelFile (BufferedFile): +class ChannelFile(BufferedFile): """ A file-like wrapper around `.Channel`. A ChannelFile is created by calling `Channel.makefile`. @@ -1225,7 +1346,7 @@ class ChannelFile (BufferedFile): flush the buffer. """ - def __init__(self, channel, mode='r', bufsize=-1): + def __init__(self, channel, mode="r", bufsize=-1): self.channel = channel BufferedFile.__init__(self) self._set_mode(mode, bufsize) @@ -1234,7 +1355,7 @@ def __repr__(self): """ Returns a string representation of this object, for debugging. """ - return '' + return "" def _read(self, size): return self.channel.recv(size) @@ -1244,9 +1365,12 @@ def _write(self, data): return len(data) -class ChannelStderrFile (ChannelFile): - def __init__(self, channel, mode='r', bufsize=-1): - ChannelFile.__init__(self, channel, mode, bufsize) +class ChannelStderrFile(ChannelFile): + """ + A file-like wrapper around `.Channel` stderr. + + See `Channel.makefile_stderr` for details. + """ def _read(self, size): return self.channel.recv_stderr(size) @@ -1256,4 +1380,13 @@ def _write(self, data): return len(data) -# vim: set shiftwidth=4 expandtab : +class ChannelStdinFile(ChannelFile): + """ + A file-like wrapper around `.Channel` stdin. + + See `Channel.makefile_stdin` for details. + """ + + def close(self): + super(ChannelStdinFile, self).close() + self.channel.shutdown_write() diff --git a/paramiko/client.py b/paramiko/client.py index ebf21b082..80c956cd1 100644 --- a/paramiko/client.py +++ b/paramiko/client.py @@ -22,6 +22,7 @@ from binascii import hexlify import getpass +import inspect import os import socket import warnings @@ -32,18 +33,20 @@ from paramiko.config import SSH_PORT from paramiko.dsskey import DSSKey from paramiko.ecdsakey import ECDSAKey +from paramiko.ed25519key import Ed25519Key from paramiko.hostkeys import HostKeys from paramiko.py3compat import string_types -from paramiko.resource import ResourceManager from paramiko.rsakey import RSAKey from paramiko.ssh_exception import ( - SSHException, BadHostKeyException, NoValidConnectionsError + SSHException, + BadHostKeyException, + NoValidConnectionsError, ) from paramiko.transport import Transport from paramiko.util import retry_on_signal, ClosingContextManager -class SSHClient (ClosingContextManager): +class SSHClient(ClosingContextManager): """ A high-level representation of a session with an SSH server. 
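# Usage sketch (not part of the patch): the typical high-level flow, including
# two behaviours from this patch: set_missing_host_key_policy() now accepts a
# policy class as well as an instance, and connect() takes a separate
# `passphrase` for key decryption. Host, user and key path are placeholders.
import os

import paramiko

client = paramiko.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(paramiko.WarningPolicy)  # class is fine now
client.connect(
    "ssh.example.com",
    username="alice",
    # A matching id_ed25519-cert.pub, if present, is loaded automatically.
    key_filename=os.path.expanduser("~/.ssh/id_ed25519"),
    passphrase="key-passphrase",
)
stdin, stdout, stderr = client.exec_command("uptime")
print(stdout.read())
client.close()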
This class wraps `.Transport`, `.Channel`, and `.SFTPClient` to take care of most @@ -91,12 +94,12 @@ def load_system_host_keys(self, filename=None): :param str filename: the filename to read, or ``None`` - :raises IOError: + :raises: ``IOError`` -- if a filename was provided and the file could not be read """ if filename is None: # try the user's .ssh key file, and mask exceptions - filename = os.path.expanduser('~/.ssh/known_hosts') + filename = os.path.expanduser("~/.ssh/known_hosts") try: self._system_host_keys.load(filename) except IOError: @@ -118,7 +121,7 @@ def load_host_keys(self, filename): :param str filename: the filename to read - :raises IOError: if the filename could not be read + :raises: ``IOError`` -- if the filename could not be read """ self._host_keys_filename = filename self._host_keys.load(filename) @@ -131,7 +134,7 @@ def save_host_keys(self, filename): :param str filename: the filename to save to - :raises IOError: if the file could not be written + :raises: ``IOError`` -- if the file could not be written """ # update local host keys from file (in case other SSH clients @@ -139,10 +142,14 @@ def save_host_keys(self, filename): if self._host_keys_filename is not None: self.load_host_keys(self._host_keys_filename) - with open(filename, 'w') as f: + with open(filename, "w") as f: for hostname, keys in self._host_keys.items(): for keytype, key in keys.items(): - f.write('%s %s %s\n' % (hostname, keytype, key.get_base64())) + f.write( + "{} {} {}\n".format( + hostname, keytype, key.get_base64() + ) + ) def get_host_keys(self): """ @@ -168,16 +175,10 @@ def set_missing_host_key_policy(self, policy): Specifically: - * A **policy** is an instance of a "policy class", namely some subclass - of `.MissingHostKeyPolicy` such as `.RejectPolicy` (the default), - `.AutoAddPolicy`, `.WarningPolicy`, or a user-created subclass. - - .. note:: - This method takes class **instances**, not **classes** themselves. - Thus it must be called as e.g. - ``.set_missing_host_key_policy(WarningPolicy())`` and *not* - ``.set_missing_host_key_policy(WarningPolicy)``. - + * A **policy** is a "policy class" (or instance thereof), namely some + subclass of `.MissingHostKeyPolicy` such as `.RejectPolicy` (the + default), `.AutoAddPolicy`, `.WarningPolicy`, or a user-created + subclass. * A host key is **known** when it appears in the client object's cached host keys structures (those manipulated by `load_system_host_keys` and/or `load_host_keys`). @@ -186,6 +187,8 @@ def set_missing_host_key_policy(self, policy): the policy to use when receiving a host key from a previously-unknown server """ + if inspect.isclass(policy): + policy = policy() self._policy = policy def _families_and_addresses(self, hostname, port): @@ -197,14 +200,17 @@ def _families_and_addresses(self, hostname, port): :returns: Yields an iterable of ``(family, address)`` tuples """ guess = True - addrinfos = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM) + addrinfos = socket.getaddrinfo( + hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM + ) for (family, socktype, proto, canonname, sockaddr) in addrinfos: if socktype == socket.SOCK_STREAM: yield family, sockaddr guess = False - # some OS like AIX don't indicate SOCK_STREAM support, so just guess. :( - # We only do this if we did not get a single result marked as socktype == SOCK_STREAM. + # some OS like AIX don't indicate SOCK_STREAM support, so just + # guess. :( We only do this if we did not get a single result marked + # as socktype == SOCK_STREAM. 
if guess: for family, _, _, _, sockaddr in addrinfos: yield family, sockaddr @@ -226,7 +232,11 @@ def connect( gss_kex=False, gss_deleg_creds=True, gss_host=None, - banner_timeout=None + banner_timeout=None, + auth_timeout=None, + gss_trust_dns=True, + passphrase=None, + disabled_algorithms=None, ): """ Connect to an SSH server and authenticate to it. The server's host key @@ -239,9 +249,23 @@ def connect( Authentication is attempted in the following order of priority: - The ``pkey`` or ``key_filename`` passed in (if any) + + - ``key_filename`` may contain OpenSSH public certificate paths + as well as regular private-key paths; when files ending in + ``-cert.pub`` are found, they are assumed to match a private + key, and both components will be loaded. (The private key + itself does *not* need to be listed in ``key_filename`` for + this to occur - *just* the certificate.) + - Any key we can find through an SSH agent - Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ``~/.ssh/`` + + - When OpenSSH-style public certificates exist that match an + existing such private key (so e.g. one has ``id_rsa`` and + ``id_rsa-cert.pub``) the certificate will be loaded alongside + the private key and used for authentication. + - Plain username/password auth, if a password was given If a private key requires a password to unlock it, and a password is @@ -253,11 +277,14 @@ def connect( the username to authenticate as (defaults to the current local username) :param str password: - a password to use for authentication or for unlocking a private key + Used for password authentication; is also used for private key + decryption if ``passphrase`` is not given. + :param str passphrase: + Used for decrypting private keys. :param .PKey pkey: an optional private key to use for authentication :param str key_filename: - the filename, or list of filenames, of optional private key(s) to - try for authentication + the filename, or list of filenames, of optional private key(s) + and/or certs to try for authentication :param float timeout: an optional timeout (in seconds) for the TCP connect :param bool allow_agent: @@ -276,19 +303,36 @@ def connect( :param bool gss_deleg_creds: Delegate GSS-API client credentials or not :param str gss_host: The targets name in the kerberos database. default: hostname + :param bool gss_trust_dns: + Indicates whether or not the DNS is trusted to securely + canonicalize the name of the host being connected to (default + ``True``). :param float banner_timeout: an optional timeout (in seconds) to wait for the SSH banner to be presented. - - :raises BadHostKeyException: if the server's host key could not be + :param float auth_timeout: an optional timeout (in seconds) to wait for + an authentication response. + :param dict disabled_algorithms: + an optional dict passed directly to `.Transport` and its keyword + argument of the same name. + + :raises: + `.BadHostKeyException` -- if the server's host key could not be verified - :raises AuthenticationException: if authentication failed - :raises SSHException: if there was any other error connecting or + :raises: `.AuthenticationException` -- if authentication failed + :raises: + `.SSHException` -- if there was any other error connecting or establishing an SSH session :raises socket.error: if a socket error occurred while connecting .. versionchanged:: 1.15 Added the ``banner_timeout``, ``gss_auth``, ``gss_kex``, ``gss_deleg_creds`` and ``gss_host`` arguments. + .. versionchanged:: 2.3 + Added the ``gss_trust_dns`` argument. + .. 
versionchanged:: 2.4 + Added the ``passphrase`` argument. + .. versionchanged:: 2.6 + Added the ``disabled_algorithms`` argument. """ if not sock: errors = {} @@ -323,47 +367,60 @@ def connect( if len(errors) == len(to_try): raise NoValidConnectionsError(errors) - t = self._transport = Transport(sock, gss_kex=gss_kex, gss_deleg_creds=gss_deleg_creds) + t = self._transport = Transport( + sock, + gss_kex=gss_kex, + gss_deleg_creds=gss_deleg_creds, + disabled_algorithms=disabled_algorithms, + ) t.use_compression(compress=compress) - if gss_kex and gss_host is None: - t.set_gss_host(hostname) - elif gss_kex and gss_host is not None: - t.set_gss_host(gss_host) - else: - pass + t.set_gss_host( + # t.hostname may be None, but GSS-API requires a target name. + # Therefore use hostname as fallback. + gss_host=gss_host or hostname, + trust_dns=gss_trust_dns, + gssapi_requested=gss_auth or gss_kex, + ) if self._log_channel is not None: t.set_log_channel(self._log_channel) if banner_timeout is not None: t.banner_timeout = banner_timeout - t.start_client() - ResourceManager.register(self, t) - - server_key = t.get_remote_server_key() - keytype = server_key.get_name() + if auth_timeout is not None: + t.auth_timeout = auth_timeout if port == SSH_PORT: server_hostkey_name = hostname else: - server_hostkey_name = "[%s]:%d" % (hostname, port) + server_hostkey_name = "[{}]:{}".format(hostname, port) + our_server_keys = None + + our_server_keys = self._system_host_keys.get(server_hostkey_name) + if our_server_keys is None: + our_server_keys = self._host_keys.get(server_hostkey_name) + if our_server_keys is not None: + keytype = our_server_keys.keys()[0] + sec_opts = t.get_security_options() + other_types = [x for x in sec_opts.key_types if x != keytype] + sec_opts.key_types = [keytype] + other_types + + t.start_client(timeout=timeout) # If GSS-API Key Exchange is performed we are not required to check the # host key, because the host is authenticated via GSS-API / SSPI as # well as our client. 
- if not self._transport.use_gss_kex: - our_server_key = self._system_host_keys.get(server_hostkey_name, - {}).get(keytype, None) - if our_server_key is None: - our_server_key = self._host_keys.get(server_hostkey_name, - {}).get(keytype, None) - if our_server_key is None: - # will raise exception if the key is rejected; let that fall out - self._policy.missing_host_key(self, server_hostkey_name, - server_key) - # if the callback returns, assume the key is ok - our_server_key = server_key - - if server_key != our_server_key: - raise BadHostKeyException(hostname, server_key, our_server_key) + if not self._transport.gss_kex_used: + server_key = t.get_remote_server_key() + if our_server_keys is None: + # will raise exception if the key is rejected + self._policy.missing_host_key( + self, server_hostkey_name, server_key + ) + else: + our_key = our_server_keys.get(server_key.get_name()) + if our_key != server_key: + if our_key is None: + our_key = list(our_server_keys.values())[0] + raise BadHostKeyException(hostname, server_key, our_key) if username is None: username = getpass.getuser() @@ -374,20 +431,32 @@ def connect( key_filenames = [key_filename] else: key_filenames = key_filename - if gss_host is None: - gss_host = hostname - self._auth(username, password, pkey, key_filenames, allow_agent, - look_for_keys, gss_auth, gss_kex, gss_deleg_creds, gss_host) + + self._auth( + username, + password, + pkey, + key_filenames, + allow_agent, + look_for_keys, + gss_auth, + gss_kex, + gss_deleg_creds, + t.gss_host, + passphrase, + ) def close(self): """ Close this SSHClient and its underlying `.Transport`. + This should be called anytime you are done using the client object. + .. warning:: - Failure to do this may, in some situations, cause your Python - interpreter to hang at shutdown (often due to race conditions). - It's good practice to `close` your client objects anytime you're - done using them, instead of relying on garbage collection. + Paramiko registers garbage collection hooks that will try to + automatically close connections for you, but this is not presently + reliable. Failure to explicitly close your client after use may + lead to end-of-process hangs! """ if self._transport is None: return @@ -398,7 +467,14 @@ def close(self): self._agent.close() self._agent = None - def exec_command(self, command, bufsize=-1, timeout=None, get_pty=False): + def exec_command( + self, + command, + bufsize=-1, + timeout=None, + get_pty=False, + environment=None, + ): """ Execute a command on the SSH server. A new `.Channel` is opened and the requested command is executed. The command's input and output @@ -410,25 +486,48 @@ def exec_command(self, command, bufsize=-1, timeout=None, get_pty=False): interpreted the same way as by the built-in ``file()`` function in Python :param int timeout: - set command's channel timeout. See `Channel.settimeout`.settimeout + set command's channel timeout. See `.Channel.settimeout` + :param bool get_pty: + Request a pseudo-terminal from the server (default ``False``). + See `.Channel.get_pty` + :param dict environment: + a dict of shell environment variables, to be merged into the + default environment that the remote command executes within. + + .. warning:: + Servers may silently reject some environment variables; see the + warning in `.Channel.set_environment_variable` for details. 
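# Usage sketch (not part of the patch): passing `environment` to exec_command,
# which applies the variables through Channel.update_environment before the
# command runs. `client` is assumed to be a connected SSHClient. A stock sshd
# usually only honours names matched by its AcceptEnv setting, so other
# variables may be silently dropped.
stdin, stdout, stderr = client.exec_command(
    "echo $LANG", environment={"LANG": "C.UTF-8"}
)
print(stdout.read())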
+ :return: the stdin, stdout, and stderr of the executing command, as a 3-tuple - :raises SSHException: if the server fails to execute the command + :raises: `.SSHException` -- if the server fails to execute the command + + .. versionchanged:: 1.10 + Added the ``get_pty`` kwarg. """ chan = self._transport.open_session(timeout=timeout) if get_pty: chan.get_pty() chan.settimeout(timeout) + if environment: + chan.update_environment(environment) chan.exec_command(command) - stdin = chan.makefile('wb', bufsize) - stdout = chan.makefile('r', bufsize) - stderr = chan.makefile_stderr('r', bufsize) + stdin = chan.makefile_stdin("wb", bufsize) + stdout = chan.makefile("r", bufsize) + stderr = chan.makefile_stderr("r", bufsize) return stdin, stdout, stderr - def invoke_shell(self, term='vt100', width=80, height=24, width_pixels=0, - height_pixels=0): + def invoke_shell( + self, + term="vt100", + width=80, + height=24, + width_pixels=0, + height_pixels=0, + environment=None, + ): """ Start an interactive shell session on the SSH server. A new `.Channel` is opened and connected to a pseudo-terminal using the requested @@ -440,9 +539,10 @@ def invoke_shell(self, term='vt100', width=80, height=24, width_pixels=0, :param int height: the height (in characters) of the terminal window :param int width_pixels: the width (in pixels) of the terminal window :param int height_pixels: the height (in pixels) of the terminal window + :param dict environment: the command's environment :return: a new `.Channel` connected to the remote shell - :raises SSHException: if the server fails to invoke a shell + :raises: `.SSHException` -- if the server fails to invoke a shell """ chan = self._transport.open_session() chan.get_pty(term, width, height, width_pixels, height_pixels) @@ -467,24 +567,71 @@ def get_transport(self): """ return self._transport - def _auth(self, username, password, pkey, key_filenames, allow_agent, - look_for_keys, gss_auth, gss_kex, gss_deleg_creds, gss_host): + def _key_from_filepath(self, filename, klass, password): + """ + Attempt to derive a `.PKey` from given string path ``filename``: + + - If ``filename`` appears to be a cert, the matching private key is + loaded. + - Otherwise, the filename is assumed to be a private key, and the + matching public cert will be loaded if it exists. + """ + cert_suffix = "-cert.pub" + # Assume privkey, not cert, by default + if filename.endswith(cert_suffix): + key_path = filename[: -len(cert_suffix)] + cert_path = filename + else: + key_path = filename + cert_path = filename + cert_suffix + # Blindly try the key path; if no private key, nothing will work. + key = klass.from_private_key_file(key_path, password) + # TODO: change this to 'Loading' instead of 'Trying' sometime; probably + # when #387 is released, since this is a critical log message users are + # likely testing/filtering for (bah.) + msg = "Trying discovered key {} in {}".format( + hexlify(key.get_fingerprint()), key_path + ) + self._log(DEBUG, msg) + # Attempt to load cert if it exists. + if os.path.isfile(cert_path): + key.load_certificate(cert_path) + self._log(DEBUG, "Adding public certificate {}".format(cert_path)) + return key + + def _auth( + self, + username, + password, + pkey, + key_filenames, + allow_agent, + look_for_keys, + gss_auth, + gss_kex, + gss_deleg_creds, + gss_host, + passphrase, + ): """ Try, in order: - - The key passed in, if one was passed in. + - The key(s) passed in, if one was passed in. - Any key we can find through an SSH agent (if allowed). 
- Any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ~/.ssh/ (if allowed). - Plain username/password auth, if a password was given. - (The password might be needed to unlock a private key, or for - two-factor authentication [for which it is required].) + (The password might be needed to unlock a private key [if 'passphrase' + isn't also given], or for two-factor authentication [for which it is + required].) """ saved_exception = None two_factor = False allowed_types = set() - two_factor_types = set(['keyboard-interactive','password']) + two_factor_types = {"keyboard-interactive", "password"} + if passphrase is None and password is not None: + passphrase = password # If GSS-API support and GSS-PI Key Exchange was performed, we attempt # authentication with gssapi-keyex. @@ -501,17 +648,24 @@ def _auth(self, username, password, pkey, key_filenames, allow_agent, # why should we do that again? if gss_auth: try: - self._transport.auth_gssapi_with_mic(username, gss_host, - gss_deleg_creds) - return + return self._transport.auth_gssapi_with_mic( + username, gss_host, gss_deleg_creds + ) except Exception as e: saved_exception = e if pkey is not None: try: - self._log(DEBUG, 'Trying SSH key %s' % hexlify(pkey.get_fingerprint())) - allowed_types = set(self._transport.auth_publickey(username, pkey)) - two_factor = (allowed_types & two_factor_types) + self._log( + DEBUG, + "Trying SSH key {}".format( + hexlify(pkey.get_fingerprint()) + ), + ) + allowed_types = set( + self._transport.auth_publickey(username, pkey) + ) + two_factor = allowed_types & two_factor_types if not two_factor: return except SSHException as e: @@ -519,12 +673,15 @@ def _auth(self, username, password, pkey, key_filenames, allow_agent, if not two_factor: for key_filename in key_filenames: - for pkey_class in (RSAKey, DSSKey, ECDSAKey): + for pkey_class in (RSAKey, DSSKey, ECDSAKey, Ed25519Key): try: - key = pkey_class.from_private_key_file(key_filename, password) - self._log(DEBUG, 'Trying key %s from %s' % (hexlify(key.get_fingerprint()), key_filename)) - allowed_types = set(self._transport.auth_publickey(username, key)) - two_factor = (allowed_types & two_factor_types) + key = self._key_from_filepath( + key_filename, pkey_class, passphrase + ) + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types if not two_factor: return break @@ -537,10 +694,14 @@ def _auth(self, username, password, pkey, key_filenames, allow_agent, for key in self._agent.get_keys(): try: - self._log(DEBUG, 'Trying SSH agent key %s' % hexlify(key.get_fingerprint())) - # for 2-factor auth a successfully auth'd key password will return an allowed 2fac auth method - allowed_types = set(self._transport.auth_publickey(username, key)) - two_factor = (allowed_types & two_factor_types) + id_ = hexlify(key.get_fingerprint()) + self._log(DEBUG, "Trying SSH agent key {}".format(id_)) + # for 2-factor auth a successfully auth'd key password + # will return an allowed 2fac auth method + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types if not two_factor: return break @@ -549,36 +710,38 @@ def _auth(self, username, password, pkey, key_filenames, allow_agent, if not two_factor: keyfiles = [] - rsa_key = os.path.expanduser('~/.ssh/id_rsa') - dsa_key = os.path.expanduser('~/.ssh/id_dsa') - ecdsa_key = os.path.expanduser('~/.ssh/id_ecdsa') - if os.path.isfile(rsa_key): - keyfiles.append((RSAKey, rsa_key)) - if os.path.isfile(dsa_key): 
- keyfiles.append((DSSKey, dsa_key)) - if os.path.isfile(ecdsa_key): - keyfiles.append((ECDSAKey, ecdsa_key)) - # look in ~/ssh/ for windows users: - rsa_key = os.path.expanduser('~/ssh/id_rsa') - dsa_key = os.path.expanduser('~/ssh/id_dsa') - ecdsa_key = os.path.expanduser('~/ssh/id_ecdsa') - if os.path.isfile(rsa_key): - keyfiles.append((RSAKey, rsa_key)) - if os.path.isfile(dsa_key): - keyfiles.append((DSSKey, dsa_key)) - if os.path.isfile(ecdsa_key): - keyfiles.append((ECDSAKey, ecdsa_key)) + + for keytype, name in [ + (RSAKey, "rsa"), + (DSSKey, "dsa"), + (ECDSAKey, "ecdsa"), + (Ed25519Key, "ed25519"), + ]: + # ~/ssh/ is for windows + for directory in [".ssh", "ssh"]: + full_path = os.path.expanduser( + "~/{}/id_{}".format(directory, name) + ) + if os.path.isfile(full_path): + # TODO: only do this append if below did not run + keyfiles.append((keytype, full_path)) + if os.path.isfile(full_path + "-cert.pub"): + keyfiles.append((keytype, full_path + "-cert.pub")) if not look_for_keys: keyfiles = [] for pkey_class, filename in keyfiles: try: - key = pkey_class.from_private_key_file(filename, password) - self._log(DEBUG, 'Trying discovered key %s in %s' % (hexlify(key.get_fingerprint()), filename)) - # for 2-factor auth a successfully auth'd key will result in ['password'] - allowed_types = set(self._transport.auth_publickey(username, key)) - two_factor = (allowed_types & two_factor_types) + key = self._key_from_filepath( + filename, pkey_class, passphrase + ) + # for 2-factor auth a successfully auth'd key will result + # in ['password'] + allowed_types = set( + self._transport.auth_publickey(username, key) + ) + two_factor = allowed_types & two_factor_types if not two_factor: return break @@ -601,13 +764,13 @@ def _auth(self, username, password, pkey, key_filenames, allow_agent, # if we got an auth-failed exception earlier, re-raise it if saved_exception is not None: raise saved_exception - raise SSHException('No authentication methods available') + raise SSHException("No authentication methods available") def _log(self, level, msg): self._transport._log(level, msg) -class MissingHostKeyPolicy (object): +class MissingHostKeyPolicy(object): """ Interface for defining the policy that `.SSHClient` should use when the SSH server's hostname is not in either the system host keys or the @@ -628,7 +791,7 @@ def missing_host_key(self, client, hostname, key): pass -class AutoAddPolicy (MissingHostKeyPolicy): +class AutoAddPolicy(MissingHostKeyPolicy): """ Policy for automatically adding the hostname and new host key to the local `.HostKeys` object, and saving it. This is used by `.SSHClient`. @@ -638,27 +801,41 @@ def missing_host_key(self, client, hostname, key): client._host_keys.add(hostname, key.get_name(), key) if client._host_keys_filename is not None: client.save_host_keys(client._host_keys_filename) - client._log(DEBUG, 'Adding %s host key for %s: %s' % - (key.get_name(), hostname, hexlify(key.get_fingerprint()))) + client._log( + DEBUG, + "Adding {} host key for {}: {}".format( + key.get_name(), hostname, hexlify(key.get_fingerprint()) + ), + ) -class RejectPolicy (MissingHostKeyPolicy): +class RejectPolicy(MissingHostKeyPolicy): """ Policy for automatically rejecting the unknown hostname & key. This is used by `.SSHClient`. 
""" def missing_host_key(self, client, hostname, key): - client._log(DEBUG, 'Rejecting %s host key for %s: %s' % - (key.get_name(), hostname, hexlify(key.get_fingerprint()))) - raise SSHException('Server %r not found in known_hosts' % hostname) - - -class WarningPolicy (MissingHostKeyPolicy): + client._log( + DEBUG, + "Rejecting {} host key for {}: {}".format( + key.get_name(), hostname, hexlify(key.get_fingerprint()) + ), + ) + raise SSHException( + "Server {!r} not found in known_hosts".format(hostname) + ) + + +class WarningPolicy(MissingHostKeyPolicy): """ Policy for logging a Python-style warning for an unknown host key, but accepting it. This is used by `.SSHClient`. """ + def missing_host_key(self, client, hostname, key): - warnings.warn('Unknown %s host key for %s: %s' % - (key.get_name(), hostname, hexlify(key.get_fingerprint()))) + warnings.warn( + "Unknown {} host key for {}: {}".format( + key.get_name(), hostname, hexlify(key.get_fingerprint()) + ) + ) diff --git a/paramiko/common.py b/paramiko/common.py index 0b0cc2a7e..55dd4bdf2 100644 --- a/paramiko/common.py +++ b/paramiko/common.py @@ -20,23 +20,48 @@ Common constants and global variables. """ import logging -from paramiko.py3compat import byte_chr, PY2, bytes_types, string_types, b, long - -MSG_DISCONNECT, MSG_IGNORE, MSG_UNIMPLEMENTED, MSG_DEBUG, MSG_SERVICE_REQUEST, \ - MSG_SERVICE_ACCEPT = range(1, 7) -MSG_KEXINIT, MSG_NEWKEYS = range(20, 22) -MSG_USERAUTH_REQUEST, MSG_USERAUTH_FAILURE, MSG_USERAUTH_SUCCESS, \ - MSG_USERAUTH_BANNER = range(50, 54) +from paramiko.py3compat import byte_chr, PY2, long, b + +( + MSG_DISCONNECT, + MSG_IGNORE, + MSG_UNIMPLEMENTED, + MSG_DEBUG, + MSG_SERVICE_REQUEST, + MSG_SERVICE_ACCEPT, + MSG_EXT_INFO, +) = range(1, 8) +(MSG_KEXINIT, MSG_NEWKEYS) = range(20, 22) +( + MSG_USERAUTH_REQUEST, + MSG_USERAUTH_FAILURE, + MSG_USERAUTH_SUCCESS, + MSG_USERAUTH_BANNER, +) = range(50, 54) MSG_USERAUTH_PK_OK = 60 -MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE = range(60, 62) -MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN = range(60, 62) -MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, MSG_USERAUTH_GSSAPI_ERROR,\ -MSG_USERAUTH_GSSAPI_ERRTOK, MSG_USERAUTH_GSSAPI_MIC = range(63, 67) -MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE = range(80, 83) -MSG_CHANNEL_OPEN, MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, \ - MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_DATA, MSG_CHANNEL_EXTENDED_DATA, \ - MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MSG_CHANNEL_REQUEST, \ - MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE = range(90, 101) +(MSG_USERAUTH_INFO_REQUEST, MSG_USERAUTH_INFO_RESPONSE) = range(60, 62) +(MSG_USERAUTH_GSSAPI_RESPONSE, MSG_USERAUTH_GSSAPI_TOKEN) = range(60, 62) +( + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE, + MSG_USERAUTH_GSSAPI_ERROR, + MSG_USERAUTH_GSSAPI_ERRTOK, + MSG_USERAUTH_GSSAPI_MIC, +) = range(63, 67) +HIGHEST_USERAUTH_MESSAGE_ID = 79 +(MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE) = range(80, 83) +( + MSG_CHANNEL_OPEN, + MSG_CHANNEL_OPEN_SUCCESS, + MSG_CHANNEL_OPEN_FAILURE, + MSG_CHANNEL_WINDOW_ADJUST, + MSG_CHANNEL_DATA, + MSG_CHANNEL_EXTENDED_DATA, + MSG_CHANNEL_EOF, + MSG_CHANNEL_CLOSE, + MSG_CHANNEL_REQUEST, + MSG_CHANNEL_SUCCESS, + MSG_CHANNEL_FAILURE, +) = range(90, 101) cMSG_DISCONNECT = byte_chr(MSG_DISCONNECT) cMSG_IGNORE = byte_chr(MSG_IGNORE) @@ -44,6 +69,7 @@ cMSG_DEBUG = byte_chr(MSG_DEBUG) cMSG_SERVICE_REQUEST = byte_chr(MSG_SERVICE_REQUEST) cMSG_SERVICE_ACCEPT = byte_chr(MSG_SERVICE_ACCEPT) +cMSG_EXT_INFO = byte_chr(MSG_EXT_INFO) cMSG_KEXINIT = 
byte_chr(MSG_KEXINIT) cMSG_NEWKEYS = byte_chr(MSG_NEWKEYS) cMSG_USERAUTH_REQUEST = byte_chr(MSG_USERAUTH_REQUEST) @@ -55,7 +81,9 @@ cMSG_USERAUTH_INFO_RESPONSE = byte_chr(MSG_USERAUTH_INFO_RESPONSE) cMSG_USERAUTH_GSSAPI_RESPONSE = byte_chr(MSG_USERAUTH_GSSAPI_RESPONSE) cMSG_USERAUTH_GSSAPI_TOKEN = byte_chr(MSG_USERAUTH_GSSAPI_TOKEN) -cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = byte_chr(MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE) +cMSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE = byte_chr( + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE +) cMSG_USERAUTH_GSSAPI_ERROR = byte_chr(MSG_USERAUTH_GSSAPI_ERROR) cMSG_USERAUTH_GSSAPI_ERRTOK = byte_chr(MSG_USERAUTH_GSSAPI_ERRTOK) cMSG_USERAUTH_GSSAPI_MIC = byte_chr(MSG_USERAUTH_GSSAPI_MIC) @@ -76,47 +104,48 @@ # for debugging: MSG_NAMES = { - MSG_DISCONNECT: 'disconnect', - MSG_IGNORE: 'ignore', - MSG_UNIMPLEMENTED: 'unimplemented', - MSG_DEBUG: 'debug', - MSG_SERVICE_REQUEST: 'service-request', - MSG_SERVICE_ACCEPT: 'service-accept', - MSG_KEXINIT: 'kexinit', - MSG_NEWKEYS: 'newkeys', - 30: 'kex30', - 31: 'kex31', - 32: 'kex32', - 33: 'kex33', - 34: 'kex34', - 40: 'kex40', - 41: 'kex41', - MSG_USERAUTH_REQUEST: 'userauth-request', - MSG_USERAUTH_FAILURE: 'userauth-failure', - MSG_USERAUTH_SUCCESS: 'userauth-success', - MSG_USERAUTH_BANNER: 'userauth--banner', - MSG_USERAUTH_PK_OK: 'userauth-60(pk-ok/info-request)', - MSG_USERAUTH_INFO_RESPONSE: 'userauth-info-response', - MSG_GLOBAL_REQUEST: 'global-request', - MSG_REQUEST_SUCCESS: 'request-success', - MSG_REQUEST_FAILURE: 'request-failure', - MSG_CHANNEL_OPEN: 'channel-open', - MSG_CHANNEL_OPEN_SUCCESS: 'channel-open-success', - MSG_CHANNEL_OPEN_FAILURE: 'channel-open-failure', - MSG_CHANNEL_WINDOW_ADJUST: 'channel-window-adjust', - MSG_CHANNEL_DATA: 'channel-data', - MSG_CHANNEL_EXTENDED_DATA: 'channel-extended-data', - MSG_CHANNEL_EOF: 'channel-eof', - MSG_CHANNEL_CLOSE: 'channel-close', - MSG_CHANNEL_REQUEST: 'channel-request', - MSG_CHANNEL_SUCCESS: 'channel-success', - MSG_CHANNEL_FAILURE: 'channel-failure', - MSG_USERAUTH_GSSAPI_RESPONSE: 'userauth-gssapi-response', - MSG_USERAUTH_GSSAPI_TOKEN: 'userauth-gssapi-token', - MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: 'userauth-gssapi-exchange-complete', - MSG_USERAUTH_GSSAPI_ERROR: 'userauth-gssapi-error', - MSG_USERAUTH_GSSAPI_ERRTOK: 'userauth-gssapi-error-token', - MSG_USERAUTH_GSSAPI_MIC: 'userauth-gssapi-mic' + MSG_DISCONNECT: "disconnect", + MSG_IGNORE: "ignore", + MSG_UNIMPLEMENTED: "unimplemented", + MSG_DEBUG: "debug", + MSG_SERVICE_REQUEST: "service-request", + MSG_SERVICE_ACCEPT: "service-accept", + MSG_KEXINIT: "kexinit", + MSG_EXT_INFO: "ext-info", + MSG_NEWKEYS: "newkeys", + 30: "kex30", + 31: "kex31", + 32: "kex32", + 33: "kex33", + 34: "kex34", + 40: "kex40", + 41: "kex41", + MSG_USERAUTH_REQUEST: "userauth-request", + MSG_USERAUTH_FAILURE: "userauth-failure", + MSG_USERAUTH_SUCCESS: "userauth-success", + MSG_USERAUTH_BANNER: "userauth--banner", + MSG_USERAUTH_PK_OK: "userauth-60(pk-ok/info-request)", + MSG_USERAUTH_INFO_RESPONSE: "userauth-info-response", + MSG_GLOBAL_REQUEST: "global-request", + MSG_REQUEST_SUCCESS: "request-success", + MSG_REQUEST_FAILURE: "request-failure", + MSG_CHANNEL_OPEN: "channel-open", + MSG_CHANNEL_OPEN_SUCCESS: "channel-open-success", + MSG_CHANNEL_OPEN_FAILURE: "channel-open-failure", + MSG_CHANNEL_WINDOW_ADJUST: "channel-window-adjust", + MSG_CHANNEL_DATA: "channel-data", + MSG_CHANNEL_EXTENDED_DATA: "channel-extended-data", + MSG_CHANNEL_EOF: "channel-eof", + MSG_CHANNEL_CLOSE: "channel-close", + MSG_CHANNEL_REQUEST: 
"channel-request", + MSG_CHANNEL_SUCCESS: "channel-success", + MSG_CHANNEL_FAILURE: "channel-failure", + MSG_USERAUTH_GSSAPI_RESPONSE: "userauth-gssapi-response", + MSG_USERAUTH_GSSAPI_TOKEN: "userauth-gssapi-token", + MSG_USERAUTH_GSSAPI_EXCHANGE_COMPLETE: "userauth-gssapi-exchange-complete", + MSG_USERAUTH_GSSAPI_ERROR: "userauth-gssapi-error", + MSG_USERAUTH_GSSAPI_ERRTOK: "userauth-gssapi-error-token", + MSG_USERAUTH_GSSAPI_MIC: "userauth-gssapi-mic", } @@ -125,23 +154,28 @@ # channel request failed reasons: -(OPEN_SUCCEEDED, - OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, - OPEN_FAILED_CONNECT_FAILED, - OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, - OPEN_FAILED_RESOURCE_SHORTAGE) = range(0, 5) +( + OPEN_SUCCEEDED, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_FAILED_CONNECT_FAILED, + OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, + OPEN_FAILED_RESOURCE_SHORTAGE, +) = range(0, 5) CONNECTION_FAILED_CODE = { - 1: 'Administratively prohibited', - 2: 'Connect failed', - 3: 'Unknown channel type', - 4: 'Resource shortage' + 1: "Administratively prohibited", + 2: "Connect failed", + 3: "Unknown channel type", + 4: "Resource shortage", } -DISCONNECT_SERVICE_NOT_AVAILABLE, DISCONNECT_AUTH_CANCELLED_BY_USER, \ - DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE = 7, 13, 14 +( + DISCONNECT_SERVICE_NOT_AVAILABLE, + DISCONNECT_AUTH_CANCELLED_BY_USER, + DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE, +) = (7, 13, 14) zero_byte = byte_chr(0) one_byte = byte_chr(1) @@ -160,15 +194,25 @@ def asbytes(s): - if not isinstance(s, bytes_types): - if isinstance(s, string_types): - s = b(s) - else: - try: - s = s.asbytes() - except Exception: - raise Exception('Unknown type') - return s + """ + Coerce to bytes if possible or return unchanged. + """ + try: + # Attempt to run through our version of b(), which does the Right Thing + # for string/unicode/buffer (Py2) or bytes/str (Py3), and raises + # TypeError if it's not one of those types. + return b(s) + except TypeError: + try: + # If it wasn't a string/byte/buffer type object, try calling an + # asbytes() method, which many of our internal classes implement. + return s.asbytes() + except AttributeError: + # Finally, just do nothing & assume this object is sufficiently + # byte-y or buffer-y that everything will work out (or that callers + # are capable of handling whatever it is.) 
+ return s + xffffffff = long(0xffffffff) x80000000 = long(0x80000000) @@ -202,4 +246,4 @@ def asbytes(s): MIN_PACKET_SIZE = 2 ** 12 # Max windows size according to http://www.ietf.org/rfc/rfc4254.txt -MAX_WINDOW_SIZE = 2**32 -1 +MAX_WINDOW_SIZE = 2 ** 32 - 1 diff --git a/paramiko/compress.py b/paramiko/compress.py index b55f0b1dc..fa3b6aa35 100644 --- a/paramiko/compress.py +++ b/paramiko/compress.py @@ -23,15 +23,16 @@ import zlib -class ZlibCompressor (object): +class ZlibCompressor(object): def __init__(self): - self.z = zlib.compressobj(9) + # Use the default level of zlib compression + self.z = zlib.compressobj() def __call__(self, data): return self.z.compress(data) + self.z.flush(zlib.Z_FULL_FLUSH) -class ZlibDecompressor (object): +class ZlibDecompressor(object): def __init__(self): self.z = zlib.decompressobj() diff --git a/paramiko/config.py b/paramiko/config.py index 7374eb1a0..9c21e4e54 100644 --- a/paramiko/config.py +++ b/paramiko/config.py @@ -22,15 +22,29 @@ """ import fnmatch +import getpass import os import re import shlex import socket +from hashlib import sha1 +from functools import partial + +from .py3compat import StringIO + +invoke, invoke_import_error = None, None +try: + import invoke +except ImportError as e: + invoke_import_error = e + +from .ssh_exception import CouldNotCanonicalize, ConfigParseError + SSH_PORT = 22 -class SSHConfig (object): +class SSHConfig(object): """ Representation of config information as stored in the format used by OpenSSH. Queries can be made via `lookup`. The format is described in @@ -41,45 +55,115 @@ class SSHConfig (object): .. versionadded:: 1.6 """ - SETTINGS_REGEX = re.compile(r'(\w+)(?:\s*=\s*|\s+)(.+)') + SETTINGS_REGEX = re.compile(r"(\w+)(?:\s*=\s*|\s+)(.+)") + + # TODO: do a full scan of ssh.c & friends to make sure we're fully + # compatible across the board, e.g. OpenSSH 8.1 added %n to ProxyCommand. + TOKENS_BY_CONFIG_KEY = { + "controlpath": ["%C", "%h", "%l", "%L", "%n", "%p", "%r", "%u"], + "hostname": ["%h"], + "identityfile": ["%C", "~", "%d", "%h", "%l", "%u", "%r"], + "proxycommand": ["~", "%h", "%p", "%r"], + # Doesn't seem worth making this 'special' for now, it will fit well + # enough (no actual match-exec config key to be confused with). + "match-exec": ["%C", "%d", "%h", "%L", "%l", "%n", "%p", "%r", "%u"], + } def __init__(self): """ Create a new OpenSSH config object. + + Note: the newer alternate constructors `from_path`, `from_file` and + `from_text` are simpler to use, as they parse on instantiation. For + example, instead of:: + + config = SSHConfig() + config.parse(open("some-path.config") + + you could:: + + config = SSHConfig.from_file(open("some-path.config")) + # Or more directly: + config = SSHConfig.from_path("some-path.config") + # Or if you have arbitrary ssh_config text from some other source: + config = SSHConfig.from_text("Host foo\\n\\tUser bar") """ self._config = [] + @classmethod + def from_text(cls, text): + """ + Create a new, parsed `SSHConfig` from ``text`` string. + + .. versionadded:: 2.7 + """ + return cls.from_file(StringIO(text)) + + @classmethod + def from_path(cls, path): + """ + Create a new, parsed `SSHConfig` from the file found at ``path``. + + .. versionadded:: 2.7 + """ + with open(path) as flo: + return cls.from_file(flo) + + @classmethod + def from_file(cls, flo): + """ + Create a new, parsed `SSHConfig` from file-like object ``flo``. + + .. 
versionadded:: 2.7 + """ + obj = cls() + obj.parse(flo) + return obj + def parse(self, file_obj): """ Read an OpenSSH config from the given file object. :param file_obj: a file-like object to read the config file from """ - host = {"host": ['*'], "config": {}} + # Start out w/ implicit/anonymous global host-like block to hold + # anything not contained by an explicit one. + context = {"host": ["*"], "config": {}} for line in file_obj: # Strip any leading or trailing whitespace from the line. - # See https://github.com/paramiko/paramiko/issues/499 for more info. + # Refer to https://github.com/paramiko/paramiko/issues/499 line = line.strip() - if not line or line.startswith('#'): + # Skip blanks, comments + if not line or line.startswith("#"): continue + # Parse line into key, value match = re.match(self.SETTINGS_REGEX, line) if not match: - raise Exception("Unparsable line %s" % line) + raise ConfigParseError("Unparsable line {}".format(line)) key = match.group(1).lower() value = match.group(2) - - if key == 'host': - self._config.append(host) - host = { - 'host': self._get_hosts(value), - 'config': {} - } - elif key == 'proxycommand' and value.lower() == 'none': + + # Host keyword triggers switch to new block/context + if key in ("host", "match"): + self._config.append(context) + context = {"config": {}} + if key == "host": + # TODO 3.0: make these real objects or at least name this + # "hosts" to acknowledge it's an iterable. (Doing so prior + # to 3.0, despite it being a private API, feels bad - + # surely such an old codebase has folks actually relying on + # these keys.) + context["host"] = self._get_hosts(value) + else: + context["matches"] = self._get_matches(value) + # Special-case for noop ProxyCommands + elif key == "proxycommand" and value.lower() == "none": # Store 'none' as None; prior to 3.x, it will get stripped out # at the end (for compatibility with issue #415). After 3.x, it # will simply not get stripped, leaving a nice explicit marker. - host['config'][key] = None + context["config"][key] = None + # All other keywords get stored, directly or via append else: if value.startswith('"') and value.endswith('"'): value = value[1:-1] @@ -87,24 +171,25 @@ def parse(self, file_obj): # identityfile, localforward, remoteforward keys are special # cases, since they are allowed to be specified multiple times # and they should be tried in order of specification. - if key in ['identityfile', 'localforward', 'remoteforward']: - if key in host['config']: - host['config'][key].append(value) + if key in ["identityfile", "localforward", "remoteforward"]: + if key in context["config"]: + context["config"][key].append(value) else: - host['config'][key] = [value] - elif key not in host['config']: - host['config'][key] = value - self._config.append(host) + context["config"][key] = [value] + elif key not in context["config"]: + context["config"][key] = value + # Store last 'open' block and we're done + self._config.append(context) def lookup(self, hostname): """ - Return a dict of config options for a given hostname. + Return a dict (`SSHConfigDict`) of config options for a given hostname. The host-matching rules of OpenSSH's ``ssh_config`` man page are used: For each parameter, the first obtained value will be used. The - configuration files contain sections separated by ``Host`` - specifications, and that section is only applied for hosts that match - one of the patterns given in the specification. 
+ configuration files contain sections separated by ``Host`` and/or + ``Match`` specifications, and that section is only applied for hosts + which match the given patterns or keywords Since the first obtained value for each parameter is used, more host- specific declarations should be given near the beginning of the file, @@ -114,29 +199,119 @@ def lookup(self, hostname): ``"port"``, not ``"Port"``. The values are processed according to the rules for substitution variable expansion in ``ssh_config``. + Finally, please see the docs for `SSHConfigDict` for deeper info on + features such as optional type conversion methods, e.g.:: + + conf = my_config.lookup('myhost') + assert conf['passwordauthentication'] == 'yes' + assert conf.as_bool('passwordauthentication') is True + + .. note:: + If there is no explicitly configured ``HostName`` value, it will be + set to the being-looked-up hostname, which is as close as we can + get to OpenSSH's behavior around that particular option. + :param str hostname: the hostname to lookup + + .. versionchanged:: 2.5 + Returns `SSHConfigDict` objects instead of dict literals. + .. versionchanged:: 2.7 + Added canonicalization support. + .. versionchanged:: 2.7 + Added ``Match`` support. """ - matches = [ - config for config in self._config - if self._allowed(config['host'], hostname) - ] - - ret = {} - for match in matches: - for key, value in match['config'].items(): - if key not in ret: + # First pass + options = self._lookup(hostname=hostname) + # Inject HostName if it was not set (this used to be done incidentally + # during tokenization, for some reason). + if "hostname" not in options: + options["hostname"] = hostname + # Handle canonicalization + canon = options.get("canonicalizehostname", None) in ("yes", "always") + maxdots = int(options.get("canonicalizemaxdots", 1)) + if canon and hostname.count(".") <= maxdots: + # NOTE: OpenSSH manpage does not explicitly state this, but its + # implementation for CanonicalDomains is 'split on any whitespace'. + domains = options["canonicaldomains"].split() + hostname = self.canonicalize(hostname, options, domains) + # Overwrite HostName again here (this is also what OpenSSH does) + options["hostname"] = hostname + options = self._lookup(hostname, options, canonical=True) + return options + + def _lookup(self, hostname, options=None, canonical=False): + # Init + if options is None: + options = SSHConfigDict() + # Iterate all stanzas, applying any that match, in turn (so that things + # like Match can reference currently understood state) + for context in self._config: + if not ( + self._pattern_matches(context.get("host", []), hostname) + or self._does_match( + context.get("matches", []), hostname, canonical, options + ) + ): + continue + for key, value in context["config"].items(): + if key not in options: # Create a copy of the original value, # else it will reference the original list # in self._config and update that value too # when the extend() is being called. 
- ret[key] = value[:] if value is not None else value - elif key == 'identityfile': - ret[key].extend(value) - ret = self._expand_variables(ret, hostname) + options[key] = value[:] if value is not None else value + elif key == "identityfile": + options[key].extend( + x for x in value if x not in options[key] + ) + # Expand variables in resulting values (besides 'Match exec' which was + # already handled above) + options = self._expand_variables(options, hostname) # TODO: remove in 3.x re #670 - if 'proxycommand' in ret and ret['proxycommand'] is None: - del ret['proxycommand'] - return ret + if "proxycommand" in options and options["proxycommand"] is None: + del options["proxycommand"] + return options + + def canonicalize(self, hostname, options, domains): + """ + Return canonicalized version of ``hostname``. + + :param str hostname: Target hostname. + :param options: An `SSHConfigDict` from a previous lookup pass. + :param domains: List of domains (e.g. ``["paramiko.org"]``). + + :returns: A canonicalized hostname if one was found, else ``None``. + + .. versionadded:: 2.7 + """ + found = False + for domain in domains: + candidate = "{}.{}".format(hostname, domain) + family_specific = _addressfamily_host_lookup(candidate, options) + if family_specific is not None: + # TODO: would we want to dig deeper into other results? e.g. to + # find something that satisfies PermittedCNAMEs when that is + # implemented? + found = family_specific[0] + else: + # TODO: what does ssh use here and is there a reason to use + # that instead of gethostbyname? + try: + found = socket.gethostbyname(candidate) + except socket.gaierror: + pass + if found: + # TODO: follow CNAME (implied by found != candidate?) if + # CanonicalizePermittedCNAMEs allows it + return candidate + # If we got here, it means canonicalization failed. + # When CanonicalizeFallbackLocal is undefined or 'yes', we just spit + # back the original hostname. + if options.get("canonicalizefallbacklocal", "yes") == "yes": + return hostname + # And here, we failed AND fallback was set to a non-yes value, so we + # need to get mad. + raise CouldNotCanonicalize(hostname) def get_hostnames(self): """ @@ -145,89 +320,177 @@ def get_hostnames(self): """ hosts = set() for entry in self._config: - hosts.update(entry['host']) + hosts.update(entry["host"]) return hosts - def _allowed(self, hosts, hostname): + def _pattern_matches(self, patterns, target): + # Convenience auto-splitter if not already a list + if hasattr(patterns, "split"): + patterns = patterns.split(",") match = False - for host in hosts: - if host.startswith('!') and fnmatch.fnmatch(hostname, host[1:]): + for pattern in patterns: + # Short-circuit if target matches a negated pattern + if pattern.startswith("!") and fnmatch.fnmatch( + target, pattern[1:] + ): return False - elif fnmatch.fnmatch(hostname, host): + # Flag a match, but continue (in case of later negation) if regular + # match occurs + elif fnmatch.fnmatch(target, pattern): match = True return match - def _expand_variables(self, config, hostname): - """ - Return a dict of config options with expanded substitutions - for a given hostname. 
+ # TODO 3.0: remove entirely (is now unused internally) + def _allowed(self, hosts, hostname): + return self._pattern_matches(hosts, hostname) + + def _does_match(self, match_list, target_hostname, canonical, options): + matched = [] + candidates = match_list[:] + local_username = getpass.getuser() + while candidates: + candidate = candidates.pop(0) + passed = None + # Obtain latest host/user value every loop, so later Match may + # reference values assigned within a prior Match. + configured_host = options.get("hostname", None) + configured_user = options.get("user", None) + type_, param = candidate["type"], candidate["param"] + # Canonical is a hard pass/fail based on whether this is a + # canonicalized re-lookup. + if type_ == "canonical": + if self._should_fail(canonical, candidate): + return False + # The parse step ensures we only see this by itself or after + # canonical, so it's also an easy hard pass. (No negation here as + # that would be uh, pretty weird?) + elif type_ == "all": + return True + # From here, we are testing various non-hard criteria, + # short-circuiting only on fail + elif type_ == "host": + hostval = configured_host or target_hostname + passed = self._pattern_matches(param, hostval) + elif type_ == "originalhost": + passed = self._pattern_matches(param, target_hostname) + elif type_ == "user": + user = configured_user or local_username + passed = self._pattern_matches(param, user) + elif type_ == "localuser": + passed = self._pattern_matches(param, local_username) + elif type_ == "exec": + exec_cmd = self._tokenize( + options, target_hostname, "match-exec", param + ) + # This is the laziest spot in which we can get mad about an + # inability to import Invoke. + if invoke is None: + raise invoke_import_error + # Like OpenSSH, we 'redirect' stdout but let stderr bubble up + passed = invoke.run(exec_cmd, hide="stdout", warn=True).ok + # Tackle any 'passed, but was negated' results from above + if passed is not None and self._should_fail(passed, candidate): + return False + # Made it all the way here? Everything matched! + matched.append(candidate) + # Did anything match? (To be treated as bool, usually.) + return matched - Please refer to man ``ssh_config`` for the parameters that - are replaced. + def _should_fail(self, would_pass, candidate): + return would_pass if candidate["negate"] else not would_pass - :param dict config: the config for the hostname - :param str hostname: the hostname that the config belongs to + def _tokenize(self, config, target_hostname, key, value): """ + Tokenize a string based on current config/hostname data. - if 'hostname' in config: - config['hostname'] = config['hostname'].replace('%h', hostname) - else: - config['hostname'] = hostname + :param config: Current config data. + :param target_hostname: Original target connection hostname. + :param key: Config key being tokenized (used to filter token list). + :param value: Config value being tokenized. - if 'port' in config: - port = config['port'] + :returns: The tokenized version of the input ``value`` string. + """ + allowed_tokens = self._allowed_tokens(key) + # Short-circuit if no tokenization possible + if not allowed_tokens: + return value + # Obtain potentially configured hostname, for use with %h. + # Special-case where we are tokenizing the hostname itself, to avoid + # replacing %h with a %h-bearing value, etc. 
+ configured_hostname = target_hostname + if key != "hostname": + configured_hostname = config.get("hostname", configured_hostname) + # Ditto the rest of the source values + if "port" in config: + port = config["port"] else: port = SSH_PORT - - user = os.getenv('USER') - if 'user' in config: - remoteuser = config['user'] + user = getpass.getuser() + if "user" in config: + remoteuser = config["user"] else: remoteuser = user + local_hostname = socket.gethostname().split(".")[0] + local_fqdn = LazyFqdn(config, local_hostname) + homedir = os.path.expanduser("~") + tohash = local_hostname + target_hostname + repr(port) + remoteuser + # The actual tokens! + replacements = { + # TODO: %%??? + "%C": sha1(tohash.encode()).hexdigest(), + "%d": homedir, + "%h": configured_hostname, + # TODO: %i? + "%L": local_hostname, + "%l": local_fqdn, + # also this is pseudo buggy when not in Match exec mode so document + # that. also WHY is that the case?? don't we do all of this late? + "%n": target_hostname, + "%p": port, + "%r": remoteuser, + # TODO: %T? don't believe this is possible however + "%u": user, + "~": homedir, + } + # Do the thing with the stuff + tokenized = value + for find, replace in replacements.items(): + if find not in allowed_tokens: + continue + tokenized = tokenized.replace(find, str(replace)) + # TODO: log? eg that value -> tokenized + return tokenized - host = socket.gethostname().split('.')[0] - fqdn = LazyFqdn(config, host) - homedir = os.path.expanduser('~') - replacements = {'controlpath': - [ - ('%h', config['hostname']), - ('%l', fqdn), - ('%L', host), - ('%n', hostname), - ('%p', port), - ('%r', remoteuser), - ('%u', user) - ], - 'identityfile': - [ - ('~', homedir), - ('%d', homedir), - ('%h', config['hostname']), - ('%l', fqdn), - ('%u', user), - ('%r', remoteuser) - ], - 'proxycommand': - [ - ('%h', config['hostname']), - ('%p', port), - ('%r', remoteuser) - ] - } + def _allowed_tokens(self, key): + """ + Given config ``key``, return list of token strings to tokenize. + + .. note:: + This feels like it wants to eventually go away, but is used to + preserve as-strict-as-possible compatibility with OpenSSH, which + for whatever reason only applies some tokens to some config keys. + """ + return self.TOKENS_BY_CONFIG_KEY.get(key, []) + + def _expand_variables(self, config, target_hostname): + """ + Return a dict of config options with expanded substitutions + for a given original & current target hostname. + Please refer to :doc:`/api/config` for details. 
+ + :param dict config: the currently parsed config + :param str hostname: the hostname whose config is being looked up + """ for k in config: if config[k] is None: continue - if k in replacements: - for find, replace in replacements[k]: - if isinstance(config[k], list): - for item in range(len(config[k])): - if find in config[k][item]: - config[k][item] = config[k][item].\ - replace(find, str(replace)) - else: - if find in config[k]: - config[k] = config[k].replace(find, str(replace)) + tokenizer = partial(self._tokenize, config, target_hostname, k) + if isinstance(config[k], list): + for i, value in enumerate(config[k]): + config[k][i] = tokenizer(value) + else: + config[k] = tokenizer(config[k]) return config def _get_hosts(self, host): @@ -237,7 +500,88 @@ def _get_hosts(self, host): try: return shlex.split(host) except ValueError: - raise Exception("Unparsable host %s" % host) + raise ConfigParseError("Unparsable host {}".format(host)) + + def _get_matches(self, match): + """ + Parse a specific Match config line into a list-of-dicts for its values. + + Performs some parse-time validation as well. + """ + matches = [] + tokens = shlex.split(match) + while tokens: + match = {"type": None, "param": None, "negate": False} + type_ = tokens.pop(0) + # Handle per-keyword negation + if type_.startswith("!"): + match["negate"] = True + type_ = type_[1:] + match["type"] = type_ + # all/canonical have no params (everything else does) + if type_ in ("all", "canonical"): + matches.append(match) + continue + if not tokens: + raise ConfigParseError( + "Missing parameter to Match '{}' keyword".format(type_) + ) + match["param"] = tokens.pop(0) + matches.append(match) + # Perform some (easier to do now than in the middle) validation that is + # better handled here than at lookup time. + keywords = [x["type"] for x in matches] + if "all" in keywords: + allowable = ("all", "canonical") + ok, bad = ( + list(filter(lambda x: x in allowable, keywords)), + list(filter(lambda x: x not in allowable, keywords)), + ) + err = None + if any(bad): + err = "Match does not allow 'all' mixed with anything but 'canonical'" # noqa + elif "canonical" in ok and ok.index("canonical") > ok.index("all"): + err = "Match does not allow 'all' before 'canonical'" + if err is not None: + raise ConfigParseError(err) + return matches + + +def _addressfamily_host_lookup(hostname, options): + """ + Try looking up ``hostname`` in an IPv4 or IPv6 specific manner. + + This is an odd duck due to needing use in two divergent use cases. It looks + up ``AddressFamily`` in ``options`` and if it is ``inet`` or ``inet6``, + this function uses `socket.getaddrinfo` to perform a family-specific + lookup, returning the result if successful. + + In any other situation -- lookup failure, or ``AddressFamily`` being + unspecified or ``any`` -- ``None`` is returned instead and the caller is + expected to do something situation-appropriate like calling + `socket.gethostbyname`. + + :param str hostname: Hostname to look up. + :param options: `SSHConfigDict` instance w/ parsed options. + :returns: ``getaddrinfo``-style tuples, or ``None``, depending. 
+ """ + address_family = options.get("addressfamily", "any").lower() + if address_family == "any": + return + try: + family = socket.AF_INET6 + if address_family == "inet": + family = socket.AF_INET + return socket.getaddrinfo( + hostname, + None, + family, + socket.SOCK_DGRAM, + socket.IPPROTO_IP, + socket.AI_CANONNAME, + ) + except socket.gaierror: + pass class LazyFqdn(object): @@ -263,32 +607,85 @@ def __str__(self): # Handle specific option fqdn = None - address_family = self.config.get('addressfamily', 'any').lower() - if address_family != 'any': - try: - family = socket.AF_INET if address_family == 'inet' \ - else socket.AF_INET6 - results = socket.getaddrinfo( - self.host, - None, - family, - socket.SOCK_DGRAM, - socket.IPPROTO_IP, - socket.AI_CANONNAME - ) - for res in results: - af, socktype, proto, canonname, sa = res - if canonname and '.' in canonname: - fqdn = canonname - break - # giaerror -> socket.getaddrinfo() can't resolve self.host - # (which is from socket.gethostname()). Fall back to the - # getfqdn() call below. - except socket.gaierror: - pass - # Handle 'any' / unspecified + results = _addressfamily_host_lookup(self.host, self.config) + if results is not None: + for res in results: + af, socktype, proto, canonname, sa = res + if canonname and "." in canonname: + fqdn = canonname + break + # Handle 'any' / unspecified / lookup failure if fqdn is None: fqdn = socket.getfqdn() # Cache self.fqdn = fqdn return self.fqdn + + +class SSHConfigDict(dict): + """ + A dictionary wrapper/subclass for per-host configuration structures. + + This class introduces some usage niceties for consumers of `SSHConfig`, + specifically around the issue of variable type conversions: normal value + access yields strings, but there are now methods such as `as_bool` and + `as_int` that yield casted values instead. + + For example, given the following ``ssh_config`` file snippet:: + + Host foo.example.com + PasswordAuthentication no + Compression yes + ServerAliveInterval 60 + + the following code highlights how you can access the raw strings as well as + usefully Python type-casted versions (recalling that keys are all + normalized to lowercase first):: + + my_config = SSHConfig() + my_config.parse(open('~/.ssh/config')) + conf = my_config.lookup('foo.example.com') + + assert conf['passwordauthentication'] == 'no' + assert conf.as_bool('passwordauthentication') is False + assert conf['compression'] == 'yes' + assert conf.as_bool('compression') is True + assert conf['serveraliveinterval'] == '60' + assert conf.as_int('serveraliveinterval') == 60 + + .. versionadded:: 2.5 + """ + + def __init__(self, *args, **kwargs): + # Hey, guess what? Python 2's userdict is an old-style class! + super(SSHConfigDict, self).__init__(*args, **kwargs) + + def as_bool(self, key): + """ + Express given key's value as a boolean type. + + Typically, this is used for ``ssh_config``'s pseudo-boolean values + which are either ``"yes"`` or ``"no"``. In such cases, ``"yes"`` yields + ``True`` and any other value becomes ``False``. + + .. note:: + If (for whatever reason) the stored value is already boolean in + nature, it's simply returned. + + .. versionadded:: 2.5 + """ + val = self[key] + if isinstance(val, bool): + return val + return val.lower() == "yes" + + def as_int(self, key): + """ + Express given key's value as an integer, if possible. + + This method will raise ``ValueError`` or similar if the value is not + int-appropriate, same as the builtin `int` type. + + .. 
versionadded:: 2.5 + """ + return int(self[key]) diff --git a/paramiko/dsskey.py b/paramiko/dsskey.py index 4644e9a67..1a0c47978 100644 --- a/paramiko/dsskey.py +++ b/paramiko/dsskey.py @@ -25,7 +25,8 @@ from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives.asymmetric import dsa from cryptography.hazmat.primitives.asymmetric.utils import ( - decode_dss_signature, encode_dss_signature + decode_dss_signature, + encode_dss_signature, ) from paramiko import util @@ -42,12 +43,21 @@ class DSSKey(PKey): data. """ - def __init__(self, msg=None, data=None, filename=None, password=None, vals=None, file_obj=None): + def __init__( + self, + msg=None, + data=None, + filename=None, + password=None, + vals=None, + file_obj=None, + ): self.p = None self.q = None self.g = None self.y = None self.x = None + self.public_blob = None if file_obj is not None: self._from_private_key(file_obj, password) return @@ -59,10 +69,11 @@ def __init__(self, msg=None, data=None, filename=None, password=None, vals=None, if vals is not None: self.p, self.q, self.g, self.y = vals else: - if msg is None: - raise SSHException('Key object may not be empty') - if msg.get_text() != 'ssh-dss': - raise SSHException('Invalid key') + self._check_type_and_load_cert( + msg=msg, + key_type="ssh-dss", + cert_type="ssh-dss-cert-v01@openssh.com", + ) self.p = msg.get_mpint() self.q = msg.get_mpint() self.g = msg.get_mpint() @@ -71,7 +82,7 @@ def __init__(self, msg=None, data=None, filename=None, password=None, vals=None, def asbytes(self): m = Message() - m.add_string('ssh-dss') + m.add_string("ssh-dss") m.add_mpint(self.p) m.add_mpint(self.q) m.add_mpint(self.g) @@ -81,17 +92,12 @@ def asbytes(self): def __str__(self): return self.asbytes() - def __hash__(self): - h = hash(self.get_name()) - h = h * 37 + hash(self.p) - h = h * 37 + hash(self.q) - h = h * 37 + hash(self.g) - h = h * 37 + hash(self.y) - # h might be a long by now... - return hash(h) + @property + def _fields(self): + return (self.get_name(), self.p, self.q, self.g, self.y) def get_name(self): - return 'ssh-dss' + return "ssh-dss" def get_bits(self): return self.size @@ -99,24 +105,21 @@ def get_bits(self): def can_sign(self): return self.x is not None - def sign_ssh_data(self, data): + def sign_ssh_data(self, data, algorithm=None): key = dsa.DSAPrivateNumbers( x=self.x, public_numbers=dsa.DSAPublicNumbers( y=self.y, parameter_numbers=dsa.DSAParameterNumbers( - p=self.p, - q=self.q, - g=self.g - ) - ) + p=self.p, q=self.q, g=self.g + ), + ), ).private_key(backend=default_backend()) - signer = key.signer(hashes.SHA1()) - signer.update(data) - r, s = decode_dss_signature(signer.finalize()) + sig = key.sign(data, hashes.SHA1()) + r, s = decode_dss_signature(sig) m = Message() - m.add_string('ssh-dss') + m.add_string("ssh-dss") # apparently, in rare cases, r or s may be shorter than 20 bytes! 
rstr = util.deflate_long(r, 0) sstr = util.deflate_long(s, 0) @@ -133,7 +136,7 @@ def verify_ssh_sig(self, data, msg): sig = msg.asbytes() else: kind = msg.get_text() - if kind != 'ssh-dss': + if kind != "ssh-dss": return 0 sig = msg.get_binary() @@ -146,15 +149,11 @@ def verify_ssh_sig(self, data, msg): key = dsa.DSAPublicNumbers( y=self.y, parameter_numbers=dsa.DSAParameterNumbers( - p=self.p, - q=self.q, - g=self.g - ) + p=self.p, q=self.q, g=self.g + ), ).public_key(backend=default_backend()) - verifier = key.verifier(signature, hashes.SHA1()) - verifier.update(data) try: - verifier.verify() + key.verify(signature, data, hashes.SHA1()) except InvalidSignature: return False else: @@ -166,18 +165,16 @@ def write_private_key_file(self, filename, password=None): public_numbers=dsa.DSAPublicNumbers( y=self.y, parameter_numbers=dsa.DSAParameterNumbers( - p=self.p, - q=self.q, - g=self.g - ) - ) + p=self.p, q=self.q, g=self.g + ), + ), ).private_key(backend=default_backend()) self._write_private_key_file( filename, key, serialization.PrivateFormat.TraditionalOpenSSL, - password=password + password=password, ) def write_private_key(self, file_obj, password=None): @@ -186,18 +183,16 @@ def write_private_key(self, file_obj, password=None): public_numbers=dsa.DSAPublicNumbers( y=self.y, parameter_numbers=dsa.DSAParameterNumbers( - p=self.p, - q=self.q, - g=self.g - ) - ) + p=self.p, q=self.q, g=self.g + ), + ), ).private_key(backend=default_backend()) self._write_private_key( file_obj, key, serialization.PrivateFormat.TraditionalOpenSSL, - password=password + password=password, ) @staticmethod @@ -207,40 +202,51 @@ def generate(bits=1024, progress_func=None): generate a new host key or authentication key. :param int bits: number of bits the generated key should be. - :param function progress_func: Unused + :param progress_func: Unused :return: new `.DSSKey` private key """ numbers = dsa.generate_private_key( bits, backend=default_backend() ).private_numbers() - key = DSSKey(vals=( - numbers.public_numbers.parameter_numbers.p, - numbers.public_numbers.parameter_numbers.q, - numbers.public_numbers.parameter_numbers.g, - numbers.public_numbers.y - )) + key = DSSKey( + vals=( + numbers.public_numbers.parameter_numbers.p, + numbers.public_numbers.parameter_numbers.q, + numbers.public_numbers.parameter_numbers.g, + numbers.public_numbers.y, + ) + ) key.x = numbers.x return key - ### internals... + # ...internals... 
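The DSSKey hunks above swap cryptography's deprecated signer()/verifier() objects for the one-shot sign()/verify() calls. Below is a minimal standalone sketch of that newer API; it is not part of the patch, the variable names are illustrative, and it assumes a reasonably recent cryptography release that still accepts 1024-bit DSA keys and SHA-1:

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import dsa

    # Throwaway 1024-bit key, mirroring DSSKey.generate()'s default bit size
    private_key = dsa.generate_private_key(1024, backend=default_backend())
    data = b"ssh payload"
    # One-shot signing replaces the old signer() -> update() -> finalize() dance
    signature = private_key.sign(data, hashes.SHA1())
    try:
        # One-shot verification raises InvalidSignature rather than returning False
        private_key.public_key().verify(signature, data, hashes.SHA1())
    except InvalidSignature:
        print("bad signature")

The try/except mirrors how the patched verify_ssh_sig() converts the exception back into a boolean result.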
def _from_private_key_file(self, filename, password): - data = self._read_private_key_file('DSA', filename, password) + data = self._read_private_key_file("DSA", filename, password) self._decode_key(data) def _from_private_key(self, file_obj, password): - data = self._read_private_key('DSA', file_obj, password) + data = self._read_private_key("DSA", file_obj, password) self._decode_key(data) def _decode_key(self, data): + pkformat, data = data # private key file contains: # DSAPrivateKey = { version = 0, p, q, g, y, x } - try: - keylist = BER(data).decode() - except BERException as e: - raise SSHException('Unable to parse key file: ' + str(e)) - if (type(keylist) is not list) or (len(keylist) < 6) or (keylist[0] != 0): - raise SSHException('not a valid DSA private key file (bad ber encoding)') + if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL: + try: + keylist = BER(data).decode() + except BERException as e: + raise SSHException("Unable to parse key file: {}".format(e)) + elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH: + keylist = self._uint32_cstruct_unpack(data, "iiiii") + keylist = [0] + list(keylist) + else: + self._got_bad_key_format_id(pkformat) + if type(keylist) is not list or len(keylist) < 6 or keylist[0] != 0: + raise SSHException( + "not a valid DSA private key file (bad ber encoding)" + ) self.p = keylist[1] self.q = keylist[2] self.g = keylist[3] diff --git a/paramiko/ecdsakey.py b/paramiko/ecdsakey.py index d435603d9..c4e2b1aff 100644 --- a/paramiko/ecdsakey.py +++ b/paramiko/ecdsakey.py @@ -20,12 +20,13 @@ ECDSA keys """ -from cryptography.exceptions import InvalidSignature +from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives.asymmetric import ec from cryptography.hazmat.primitives.asymmetric.utils import ( - decode_dss_signature, encode_dss_signature + decode_dss_signature, + encode_dss_signature, ) from paramiko.common import four_byte @@ -43,6 +44,7 @@ class _ECDSACurve(object): the proper hash function. Also grabs the proper curve from the 'ecdsa' package. """ + def __init__(self, curve_class, nist_name): self.nist_name = nist_name self.key_length = curve_class.key_size @@ -67,6 +69,7 @@ class _ECDSACurveSet(object): format identifier. The two ways in which ECDSAKey needs to be able to look up curves. """ + def __init__(self, ecdsa_curves): self.ecdsa_curves = ecdsa_curves @@ -95,16 +98,27 @@ class ECDSAKey(PKey): data. 
""" - _ECDSA_CURVES = _ECDSACurveSet([ - _ECDSACurve(ec.SECP256R1, 'nistp256'), - _ECDSACurve(ec.SECP384R1, 'nistp384'), - _ECDSACurve(ec.SECP521R1, 'nistp521'), - ]) - - def __init__(self, msg=None, data=None, filename=None, password=None, - vals=None, file_obj=None, validate_point=True): + _ECDSA_CURVES = _ECDSACurveSet( + [ + _ECDSACurve(ec.SECP256R1, "nistp256"), + _ECDSACurve(ec.SECP384R1, "nistp384"), + _ECDSACurve(ec.SECP521R1, "nistp521"), + ] + ) + + def __init__( + self, + msg=None, + data=None, + filename=None, + password=None, + vals=None, + file_obj=None, + validate_point=True, + ): self.verifying_key = None self.signing_key = None + self.public_blob = None if file_obj is not None: self._from_private_key(file_obj, password) return @@ -118,24 +132,40 @@ def __init__(self, msg=None, data=None, filename=None, password=None, c_class = self.signing_key.curve.__class__ self.ecdsa_curve = self._ECDSA_CURVES.get_by_curve_class(c_class) else: - if msg is None: - raise SSHException('Key object may not be empty') + # Must set ecdsa_curve first; subroutines called herein may need to + # spit out our get_name(), which relies on this. + key_type = msg.get_text() + # But this also means we need to hand it a real key/curve + # identifier, so strip out any cert business. (NOTE: could push + # that into _ECDSACurveSet.get_by_key_format_identifier(), but it + # feels more correct to do it here?) + suffix = "-cert-v01@openssh.com" + if key_type.endswith(suffix): + key_type = key_type[: -len(suffix)] self.ecdsa_curve = self._ECDSA_CURVES.get_by_key_format_identifier( - msg.get_text()) - if self.ecdsa_curve is None: - raise SSHException('Invalid key') + key_type + ) + key_types = self._ECDSA_CURVES.get_key_format_identifier_list() + cert_types = [ + "{}-cert-v01@openssh.com".format(x) for x in key_types + ] + self._check_type_and_load_cert( + msg=msg, key_type=key_types, cert_type=cert_types + ) curvename = msg.get_text() if curvename != self.ecdsa_curve.nist_name: - raise SSHException("Can't handle curve of type %s" % curvename) + raise SSHException( + "Can't handle curve of type {}".format(curvename) + ) pointinfo = msg.get_binary() try: - numbers = ec.EllipticCurvePublicNumbers.from_encoded_point( + key = ec.EllipticCurvePublicKey.from_encoded_point( self.ecdsa_curve.curve_class(), pointinfo ) + self.verifying_key = key except ValueError: raise SSHException("Invalid public key") - self.verifying_key = numbers.public_key(backend=default_backend()) @classmethod def supported_key_format_identifiers(cls): @@ -152,10 +182,10 @@ def asbytes(self): key_size_bytes = (key.curve.key_size + 7) // 8 x_bytes = deflate_long(numbers.x, add_sign_padding=False) - x_bytes = b'\x00' * (key_size_bytes - len(x_bytes)) + x_bytes + x_bytes = b"\x00" * (key_size_bytes - len(x_bytes)) + x_bytes y_bytes = deflate_long(numbers.y, add_sign_padding=False) - y_bytes = b'\x00' * (key_size_bytes - len(y_bytes)) + y_bytes + y_bytes = b"\x00" * (key_size_bytes - len(y_bytes)) + y_bytes point_str = four_byte + x_bytes + y_bytes m.add_string(point_str) @@ -164,11 +194,13 @@ def asbytes(self): def __str__(self): return self.asbytes() - def __hash__(self): - h = hash(self.get_name()) - h = h * 37 + hash(self.verifying_key.public_numbers().x) - h = h * 37 + hash(self.verifying_key.public_numbers().y) - return hash(h) + @property + def _fields(self): + return ( + self.get_name(), + self.verifying_key.public_numbers().x, + self.verifying_key.public_numbers().y, + ) def get_name(self): return self.ecdsa_curve.key_format_identifier @@ 
-179,11 +211,9 @@ def get_bits(self): def can_sign(self): return self.signing_key is not None - def sign_ssh_data(self, data): + def sign_ssh_data(self, data, algorithm=None): ecdsa = ec.ECDSA(self.ecdsa_curve.hash_object()) - signer = self.signing_key.signer(ecdsa) - signer.update(data) - sig = signer.finalize() + sig = self.signing_key.sign(data, ecdsa) r, s = decode_dss_signature(sig) m = Message() @@ -198,12 +228,10 @@ def verify_ssh_sig(self, data, msg): sigR, sigS = self._sigdecode(sig) signature = encode_dss_signature(sigR, sigS) - verifier = self.verifying_key.verifier( - signature, ec.ECDSA(self.ecdsa_curve.hash_object()) - ) - verifier.update(data) try: - verifier.verify() + self.verifying_key.verify( + signature, data, ec.ECDSA(self.ecdsa_curve.hash_object()) + ) except InvalidSignature: return False else: @@ -214,7 +242,7 @@ def write_private_key_file(self, filename, password=None): filename, self.signing_key, serialization.PrivateFormat.TraditionalOpenSSL, - password=password + password=password, ) def write_private_key(self, file_obj, password=None): @@ -222,7 +250,7 @@ def write_private_key(self, file_obj, password=None): file_obj, self.signing_key, serialization.PrivateFormat.TraditionalOpenSSL, - password=password + password=password, ) @classmethod @@ -231,35 +259,61 @@ def generate(cls, curve=ec.SECP256R1(), progress_func=None, bits=None): Generate a new private ECDSA key. This factory function can be used to generate a new host key or authentication key. - :param function progress_func: Not used for this type of key. + :param progress_func: Not used for this type of key. :returns: A new private key (`.ECDSAKey`) object """ if bits is not None: curve = cls._ECDSA_CURVES.get_by_key_length(bits) if curve is None: - raise ValueError("Unsupported key length: %d"%(bits)) + raise ValueError("Unsupported key length: {:d}".format(bits)) curve = curve.curve_class() private_key = ec.generate_private_key(curve, backend=default_backend()) return ECDSAKey(vals=(private_key, private_key.public_key())) - ### internals... + # ...internals... 
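The ECDSAKey hunks above make the same signer()/verifier() to sign()/verify() migration, and load peer public points via EllipticCurvePublicKey.from_encoded_point() instead of the numbers-based constructor. A small self-contained sketch of those cryptography calls, illustrative only and not taken from the patch, using P-256 with SHA-256 and assuming cryptography 2.5 or later for the X962 point serialization:

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import ec

    private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
    public_key = private_key.public_key()
    data = b"ssh payload"
    # One-shot sign/verify, as used by the patched sign_ssh_data()/verify_ssh_sig()
    signature = private_key.sign(data, ec.ECDSA(hashes.SHA256()))
    try:
        public_key.verify(signature, data, ec.ECDSA(hashes.SHA256()))
    except InvalidSignature:
        print("bad signature")
    # Round-trip an uncompressed point the way the patched constructor now does
    point = public_key.public_bytes(
        serialization.Encoding.X962, serialization.PublicFormat.UncompressedPoint
    )
    restored = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256R1(), point)
    assert restored.public_numbers() == public_key.public_numbers()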
def _from_private_key_file(self, filename, password): - data = self._read_private_key_file('EC', filename, password) + data = self._read_private_key_file("EC", filename, password) self._decode_key(data) def _from_private_key(self, file_obj, password): - data = self._read_private_key('EC', file_obj, password) + data = self._read_private_key("EC", file_obj, password) self._decode_key(data) def _decode_key(self, data): - try: - key = serialization.load_der_private_key( - data, password=None, backend=default_backend() - ) - except ValueError as e: - raise SSHException(str(e)) + pkformat, data = data + if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL: + try: + key = serialization.load_der_private_key( + data, password=None, backend=default_backend() + ) + except ( + ValueError, + AssertionError, + TypeError, + UnsupportedAlgorithm, + ) as e: + raise SSHException(str(e)) + elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH: + try: + msg = Message(data) + curve_name = msg.get_text() + verkey = msg.get_binary() # noqa: F841 + sigkey = msg.get_mpint() + name = "ecdsa-sha2-" + curve_name + curve = self._ECDSA_CURVES.get_by_key_format_identifier(name) + if not curve: + raise SSHException("Invalid key curve identifier") + key = ec.derive_private_key( + sigkey, curve.curve_class(), default_backend() + ) + except Exception as e: + # PKey._read_private_key_openssh() should check or return + # keytype - parsing could fail for any reason due to wrong type + raise SSHException(str(e)) + else: + self._got_bad_key_format_id(pkformat) self.signing_key = key self.verifying_key = key.public_key() diff --git a/paramiko/ed25519key.py b/paramiko/ed25519key.py new file mode 100644 index 000000000..d322a0c13 --- /dev/null +++ b/paramiko/ed25519key.py @@ -0,0 +1,209 @@ +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distrubuted in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. + +import bcrypt + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher + +import nacl.signing + +from paramiko.message import Message +from paramiko.pkey import PKey, OPENSSH_AUTH_MAGIC, _unpad_openssh +from paramiko.py3compat import b +from paramiko.ssh_exception import SSHException, PasswordRequiredException + + +class Ed25519Key(PKey): + """ + Representation of an `Ed25519 `_ key. + + .. note:: + Ed25519 key support was added to OpenSSH in version 6.5. + + .. versionadded:: 2.2 + .. versionchanged:: 2.3 + Added a ``file_obj`` parameter to match other key classes. 
+ """ + + def __init__( + self, msg=None, data=None, filename=None, password=None, file_obj=None + ): + self.public_blob = None + verifying_key = signing_key = None + if msg is None and data is not None: + msg = Message(data) + if msg is not None: + self._check_type_and_load_cert( + msg=msg, + key_type="ssh-ed25519", + cert_type="ssh-ed25519-cert-v01@openssh.com", + ) + verifying_key = nacl.signing.VerifyKey(msg.get_binary()) + elif filename is not None: + with open(filename, "r") as f: + pkformat, data = self._read_private_key("OPENSSH", f) + elif file_obj is not None: + pkformat, data = self._read_private_key("OPENSSH", file_obj) + + if filename or file_obj: + signing_key = self._parse_signing_key_data(data, password) + + if signing_key is None and verifying_key is None: + raise ValueError("need a key") + + self._signing_key = signing_key + self._verifying_key = verifying_key + + def _parse_signing_key_data(self, data, password): + from paramiko.transport import Transport + + # We may eventually want this to be usable for other key types, as + # OpenSSH moves to it, but for now this is just for Ed25519 keys. + # This format is described here: + # https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key + # The description isn't totally complete, and I had to refer to the + # source for a full implementation. + message = Message(data) + if message.get_bytes(len(OPENSSH_AUTH_MAGIC)) != OPENSSH_AUTH_MAGIC: + raise SSHException("Invalid key") + + ciphername = message.get_text() + kdfname = message.get_text() + kdfoptions = message.get_binary() + num_keys = message.get_int() + + if kdfname == "none": + # kdfname of "none" must have an empty kdfoptions, the ciphername + # must be "none" + if kdfoptions or ciphername != "none": + raise SSHException("Invalid key") + elif kdfname == "bcrypt": + if not password: + raise PasswordRequiredException( + "Private key file is encrypted" + ) + kdf = Message(kdfoptions) + bcrypt_salt = kdf.get_binary() + bcrypt_rounds = kdf.get_int() + else: + raise SSHException("Invalid key") + + if ciphername != "none" and ciphername not in Transport._cipher_info: + raise SSHException("Invalid key") + + public_keys = [] + for _ in range(num_keys): + pubkey = Message(message.get_binary()) + if pubkey.get_text() != "ssh-ed25519": + raise SSHException("Invalid key") + public_keys.append(pubkey.get_binary()) + + private_ciphertext = message.get_binary() + if ciphername == "none": + private_data = private_ciphertext + else: + cipher = Transport._cipher_info[ciphername] + key = bcrypt.kdf( + password=b(password), + salt=bcrypt_salt, + desired_key_bytes=cipher["key-size"] + cipher["block-size"], + rounds=bcrypt_rounds, + # We can't control how many rounds are on disk, so no sense + # warning about it. + ignore_few_rounds=True, + ) + decryptor = Cipher( + cipher["class"](key[: cipher["key-size"]]), + cipher["mode"](key[cipher["key-size"] :]), + backend=default_backend(), + ).decryptor() + private_data = ( + decryptor.update(private_ciphertext) + decryptor.finalize() + ) + + message = Message(_unpad_openssh(private_data)) + if message.get_int() != message.get_int(): + raise SSHException("Invalid key") + + signing_keys = [] + for i in range(num_keys): + if message.get_text() != "ssh-ed25519": + raise SSHException("Invalid key") + # A copy of the public key, again, ignore. + public = message.get_binary() + key_data = message.get_binary() + # The second half of the key data is yet another copy of the public + # key... 
+ signing_key = nacl.signing.SigningKey(key_data[:32]) + # Verify that all the public keys are the same... + assert ( + signing_key.verify_key.encode() + == public + == public_keys[i] + == key_data[32:] + ) + signing_keys.append(signing_key) + # Comment, ignore. + message.get_binary() + + if len(signing_keys) != 1: + raise SSHException("Invalid key") + return signing_keys[0] + + def asbytes(self): + if self.can_sign(): + v = self._signing_key.verify_key + else: + v = self._verifying_key + m = Message() + m.add_string("ssh-ed25519") + m.add_string(v.encode()) + return m.asbytes() + + @property + def _fields(self): + if self.can_sign(): + v = self._signing_key.verify_key + else: + v = self._verifying_key + return (self.get_name(), v) + + def get_name(self): + return "ssh-ed25519" + + def get_bits(self): + return 256 + + def can_sign(self): + return self._signing_key is not None + + def sign_ssh_data(self, data, algorithm=None): + m = Message() + m.add_string("ssh-ed25519") + m.add_string(self._signing_key.sign(data).signature) + return m + + def verify_ssh_sig(self, data, msg): + if msg.get_text() != "ssh-ed25519": + return False + + try: + self._verifying_key.verify(data, msg.get_binary()) + except nacl.exceptions.BadSignatureError: + return False + else: + return True diff --git a/paramiko/file.py b/paramiko/file.py index 05f2d6e67..9dd9e9e79 100644 --- a/paramiko/file.py +++ b/paramiko/file.py @@ -15,14 +15,19 @@ # You should have received a copy of the GNU Lesser General Public License # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. -from paramiko.common import linefeed_byte_value, crlf, cr_byte, linefeed_byte, \ - cr_byte_value -from paramiko.py3compat import BytesIO, PY2, u, b, bytes_types +from paramiko.common import ( + linefeed_byte_value, + crlf, + cr_byte, + linefeed_byte, + cr_byte_value, +) +from paramiko.py3compat import BytesIO, PY2, u, bytes_types, text_type from paramiko.util import ClosingContextManager -class BufferedFile (ClosingContextManager): +class BufferedFile(ClosingContextManager): """ Reusable base class to implement Python-style file buffering around a simpler stream. @@ -66,10 +71,10 @@ def __iter__(self): file. This iterator happens to return the file itself, since a file is its own iterator. - :raises ValueError: if the file is closed. + :raises: ``ValueError`` -- if the file is closed. """ if self._closed: - raise ValueError('I/O operation on closed file') + raise ValueError("I/O operation on closed file") return self def close(self): @@ -89,13 +94,14 @@ def flush(self): return if PY2: + def next(self): """ Returns the next line from the input, or raises - `~exceptions.StopIteration` when EOF is hit. Unlike Python file + ``StopIteration`` when EOF is hit. Unlike Python file objects, it's okay to mix calls to `next` and `readline`. - :raises StopIteration: when the end of the file is reached. + :raises: ``StopIteration`` -- when the end of the file is reached. :returns: a line (`str`) read from the file. """ @@ -103,14 +109,16 @@ def next(self): if not line: raise StopIteration return line + else: + def __next__(self): """ - Returns the next line from the input, or raises `.StopIteration` when - EOF is hit. Unlike python file objects, it's okay to mix calls to - `.next` and `.readline`. + Returns the next line from the input, or raises ``StopIteration`` + when EOF is hit. Unlike python file objects, it's okay to mix + calls to `.next` and `.readline`. 
- :raises StopIteration: when the end of the file is reached. + :raises: ``StopIteration`` -- when the end of the file is reached. :returns: a line (`str`) read from the file. """ @@ -151,21 +159,21 @@ def seekable(self): def readinto(self, buff): """ - Read up to ``len(buff)`` bytes into :class:`bytearray` *buff* and - return the number of bytes read. + Read up to ``len(buff)`` bytes into ``bytearray`` *buff* and return the + number of bytes read. :returns: The number of bytes read. """ data = self.read(len(buff)) - buff[:len(data)] = data + buff[: len(data)] = data return len(data) def read(self, size=None): """ - Read at most ``size`` bytes from the file (less if we hit the end of the - file first). If the ``size`` argument is negative or omitted, read all - the remaining data in the file. + Read at most ``size`` bytes from the file (less if we hit the end of + the file first). If the ``size`` argument is negative or omitted, + read all the remaining data in the file. .. note:: ``'b'`` mode flag is ignored (``self.FLAG_BINARY`` in @@ -179,12 +187,12 @@ def read(self, size=None): encountered immediately """ if self._closed: - raise IOError('File is closed') + raise IOError("File is closed") if not (self._flags & self.FLAG_READ): - raise IOError('File is not open for reading') + raise IOError("File is not open for reading") if (size is None) or (size < 0): # go for broke - result = self._rbuffer + result = bytearray(self._rbuffer) self._rbuffer = bytes() self._pos += len(result) while True: @@ -194,10 +202,10 @@ def read(self, size=None): new_data = None if (new_data is None) or (len(new_data) == 0): break - result += new_data + result.extend(new_data) self._realpos += len(new_data) self._pos += len(new_data) - return result + return bytes(result) if size <= len(self._rbuffer): result = self._rbuffer[:size] self._rbuffer = self._rbuffer[size:] @@ -244,13 +252,17 @@ def readline(self, size=None): """ # it's almost silly how complex this function is. if self._closed: - raise IOError('File is closed') + raise IOError("File is closed") if not (self._flags & self.FLAG_READ): - raise IOError('File not open for reading') + raise IOError("File not open for reading") line = self._rbuffer truncated = False while True: - if self._at_trailing_cr and (self._flags & self.FLAG_UNIVERSAL_NEWLINE) and (len(line) > 0): + if ( + self._at_trailing_cr + and self._flags & self.FLAG_UNIVERSAL_NEWLINE + and len(line) > 0 + ): # edge case: the newline may be '\r\n' and we may have read # only the first '\r' last time. 
if line[0] == linefeed_byte_value: @@ -271,7 +283,9 @@ def readline(self, size=None): n = size - len(line) else: n = self._bufsize - if (linefeed_byte in line) or ((self._flags & self.FLAG_UNIVERSAL_NEWLINE) and (cr_byte in line)): + if linefeed_byte in line or ( + self._flags & self.FLAG_UNIVERSAL_NEWLINE and cr_byte in line + ): break try: new_data = self._read(n) @@ -294,12 +308,20 @@ def readline(self, size=None): self._pos += len(line) return line if self._flags & self.FLAG_BINARY else u(line) xpos = pos + 1 - if (line[pos] == cr_byte_value) and (xpos < len(line)) and (line[xpos] == linefeed_byte_value): + if ( + line[pos] == cr_byte_value + and xpos < len(line) + and line[xpos] == linefeed_byte_value + ): xpos += 1 # if the string was truncated, _rbuffer needs to have the string after # the newline character plus the truncated part of the line we stored # earlier in _rbuffer - self._rbuffer = line[xpos:] + self._rbuffer if truncated else line[xpos:] + if truncated: + self._rbuffer = line[xpos:] + self._rbuffer + else: + self._rbuffer = line[xpos:] + lf = line[pos:xpos] line = line[:pos] + linefeed_byte if (len(self._rbuffer) == 0) and (lf == cr_byte): @@ -319,7 +341,7 @@ def readlines(self, sizehint=None): after rounding up to an internal buffer size) are read. :param int sizehint: desired maximum number of bytes to read. - :returns: `list` of lines read from the file. + :returns: list of lines read from the file. """ lines = [] byte_count = 0 @@ -349,9 +371,9 @@ def seek(self, offset, whence=0): type of movement: 0 = absolute; 1 = relative to the current position; 2 = relative to the end of the file. - :raises IOError: if the file doesn't support random access. + :raises: ``IOError`` -- if the file doesn't support random access. """ - raise IOError('File does not support seeking.') + raise IOError("File does not support seeking.") def tell(self): """ @@ -370,13 +392,15 @@ def write(self, data): written yet. (Use `flush` or `close` to force buffered data to be written out.) - :param str data: data to write + :param data: ``str``/``bytes`` data to write """ - data = b(data) + if isinstance(data, text_type): + # Accept text and encode as utf-8 for compatibility only. + data = data.encode("utf-8") if self._closed: - raise IOError('File is closed') + raise IOError("File is closed") if not (self._flags & self.FLAG_WRITE): - raise IOError('File not open for writing') + raise IOError("File not open for writing") if not (self._flags & self.FLAG_BUFFERED): self._write_all(data) return @@ -387,9 +411,9 @@ def write(self, data): if last_newline_pos >= 0: wbuf = self._wbuffer.getvalue() last_newline_pos += len(wbuf) - len(data) - self._write_all(wbuf[:last_newline_pos + 1]) + self._write_all(wbuf[: last_newline_pos + 1]) self._wbuffer = BytesIO() - self._wbuffer.write(wbuf[last_newline_pos + 1:]) + self._wbuffer.write(wbuf[last_newline_pos + 1 :]) return # even if we're line buffering, if the buffer has grown past the # buffer size, force a flush. @@ -404,7 +428,7 @@ def writelines(self, sequence): name is intended to match `readlines`; `writelines` does not add line separators.) - :param iterable sequence: an iterable sequence of strings. + :param sequence: an iterable sequence of strings. """ for line in sequence: self.write(line) @@ -421,7 +445,7 @@ def xreadlines(self): def closed(self): return self._closed - ### overrides... + # ...overrides... def _read(self, size): """ @@ -436,7 +460,7 @@ def _write(self, data): (subclass override) Write data into the stream. 
""" - raise IOError('write not implemented') + raise IOError("write not implemented") def _get_size(self): """ @@ -449,9 +473,9 @@ def _get_size(self): """ return 0 - ### internals... + # ...internals... - def _set_mode(self, mode='r', bufsize=-1): + def _set_mode(self, mode="r", bufsize=-1): """ Subclasses call this method to initialize the BufferedFile. """ @@ -474,26 +498,27 @@ def _set_mode(self, mode='r', bufsize=-1): # unbuffered self._flags &= ~(self.FLAG_BUFFERED | self.FLAG_LINE_BUFFERED) - if ('r' in mode) or ('+' in mode): + if ("r" in mode) or ("+" in mode): self._flags |= self.FLAG_READ - if ('w' in mode) or ('+' in mode): + if ("w" in mode) or ("+" in mode): self._flags |= self.FLAG_WRITE - if 'a' in mode: + if "a" in mode: self._flags |= self.FLAG_WRITE | self.FLAG_APPEND self._size = self._get_size() self._pos = self._realpos = self._size - if 'b' in mode: + if "b" in mode: self._flags |= self.FLAG_BINARY - if 'U' in mode: + if "U" in mode: self._flags |= self.FLAG_UNIVERSAL_NEWLINE # built-in file objects have this attribute to store which kinds of # line terminations they've seen: # self.newlines = None - def _write_all(self, data): + def _write_all(self, raw_data): # the underlying stream may be something that does partial writes (like # a socket). + data = memoryview(raw_data) while len(data) > 0: count = self._write(data) data = data[count:] @@ -513,7 +538,9 @@ def _record_newline(self, newline): return if self.newlines is None: self.newlines = newline - elif self.newlines != newline and isinstance(self.newlines, bytes_types): + elif self.newlines != newline and isinstance( + self.newlines, bytes_types + ): self.newlines = (self.newlines, newline) elif newline not in self.newlines: self.newlines += (newline,) diff --git a/paramiko/hostkeys.py b/paramiko/hostkeys.py index 2ee3d27f5..94474e40a 100644 --- a/paramiko/hostkeys.py +++ b/paramiko/hostkeys.py @@ -19,26 +19,26 @@ import binascii import os +import sys +if sys.version_info[:2] >= (3, 3): + from collections.abc import MutableMapping +else: + from collections import MutableMapping from hashlib import sha1 from hmac import HMAC from paramiko.py3compat import b, u, encodebytes, decodebytes -try: - from collections import MutableMapping -except ImportError: - # noinspection PyUnresolvedReferences - from UserDict import DictMixin as MutableMapping - from paramiko.dsskey import DSSKey from paramiko.rsakey import RSAKey from paramiko.util import get_logger, constant_time_bytes_eq from paramiko.ecdsakey import ECDSAKey +from paramiko.ed25519key import Ed25519Key from paramiko.ssh_exception import SSHException -class HostKeys (MutableMapping): +class HostKeys(MutableMapping): """ Representation of an OpenSSH-style "known hosts" file. Host keys can be read from one or more files, and then individual hosts can be looked up to @@ -90,12 +90,12 @@ def load(self, filename): :param str filename: name of the file to read host keys from - :raises IOError: if there was an error reading the file + :raises: ``IOError`` -- if there was an error reading the file """ - with open(filename, 'r') as f: + with open(filename, "r") as f: for lineno, line in enumerate(f, 1): line = line.strip() - if (len(line) == 0) or (line[0] == '#'): + if (len(line) == 0) or (line[0] == "#"): continue try: e = HostKeyEntry.from_line(line, lineno) @@ -111,18 +111,18 @@ def load(self, filename): def save(self, filename): """ - Save host keys into a file, in the format used by OpenSSH. 
The order of - keys in the file will be preserved when possible (if these keys were + Save host keys into a file, in the format used by OpenSSH. The order + of keys in the file will be preserved when possible (if these keys were loaded from a file originally). The single exception is that combined lines will be split into individual key lines, which is arguably a bug. :param str filename: name of the file to write - :raises IOError: if there was an error writing the file + :raises: ``IOError`` -- if there was an error writing the file .. versionadded:: 1.6.1 """ - with open(filename, 'w') as f: + with open(filename, "w") as f: for e in self._entries: line = e.to_line() if line: @@ -135,9 +135,11 @@ def lookup(self, hostname): returned. The keytype will be either ``"ssh-rsa"`` or ``"ssh-dss"``. :param str hostname: the hostname (or IP) to lookup - :return: dict of `str` -> `.PKey` keys associated with this host (or ``None``) + :return: dict of `str` -> `.PKey` keys associated with this host + (or ``None``) """ - class SubDict (MutableMapping): + + class SubDict(MutableMapping): def __init__(self, hostname, entries, hostkeys): self._hostname = hostname self._entries = entries @@ -154,6 +156,7 @@ def __delitem__(self, key): for e in list(self._entries): if e.key.get_name() == key: self._entries.remove(e) + break else: raise KeyError(key) @@ -178,17 +181,36 @@ def __setitem__(self, key, val): self._hostkeys._entries.append(e) def keys(self): - return [e.key.get_name() for e in self._entries if e.key is not None] + return [ + e.key.get_name() + for e in self._entries + if e.key is not None + ] entries = [] for e in self._entries: - for h in e.hostnames: - if h.startswith('|1|') and not hostname.startswith('|1|') and constant_time_bytes_eq(self.hash_host(hostname, h), h) or h == hostname: - entries.append(e) + if self._hostname_matches(hostname, e): + entries.append(e) if len(entries) == 0: return None return SubDict(hostname, entries, self) + def _hostname_matches(self, hostname, entry): + """ + Tests whether ``hostname`` string matches given SubDict ``entry``. + + :returns bool: + """ + for h in entry.hostnames: + if ( + h == hostname + or h.startswith("|1|") + and not hostname.startswith("|1|") + and constant_time_bytes_eq(self.hash_host(hostname, h), h) + ): + return True + return False + def check(self, hostname, key): """ Return True if the given key is associated with the given hostname @@ -220,15 +242,22 @@ def __iter__(self): def __len__(self): return len(self.keys()) - def __delitem__(self, key): - k = self[key] - def __getitem__(self, key): ret = self.lookup(key) if ret is None: raise KeyError(key) return ret + def __delitem__(self, key): + index = None + for i, entry in enumerate(self._entries): + if self._hostname_matches(key, entry): + index = i + break + if index is None: + raise KeyError(key) + self._entries.pop(index) + def __setitem__(self, hostname, entry): # don't use this please. if len(entry) == 0: @@ -237,7 +266,7 @@ def __setitem__(self, hostname, entry): for key_type in entry.keys(): found = False for e in self._entries: - if (hostname in e.hostnames) and (e.key.get_name() == key_type): + if (hostname in e.hostnames) and e.key.get_name() == key_type: # replace e.key = entry[key_type] found = True @@ -266,19 +295,20 @@ def hash_host(hostname, salt=None): hashed hostnames in the known_hosts file. 
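        For example, the hashed form is ``|1|<base64 salt>|<base64 HMAC>``;
        a rough stand-alone sketch (``example.com`` is an arbitrary host)::

            import os
            from base64 import b64encode
            from hashlib import sha1
            from hmac import HMAC

            salt = os.urandom(sha1().digest_size)  # 20 random bytes
            mac = HMAC(salt, b"example.com", sha1).digest()
            hashed = "|1|{}|{}".format(
                b64encode(salt).decode(), b64encode(mac).decode()
            )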
:param str hostname: the hostname to hash - :param str salt: optional salt to use when hashing (must be 20 bytes long) + :param str salt: optional salt to use when hashing + (must be 20 bytes long) :return: the hashed hostname as a `str` """ if salt is None: salt = os.urandom(sha1().digest_size) else: - if salt.startswith('|1|'): - salt = salt.split('|')[2] + if salt.startswith("|1|"): + salt = salt.split("|")[2] salt = decodebytes(b(salt)) assert len(salt) == sha1().digest_size hmac = HMAC(salt, b(hostname), sha1).digest() - hostkey = '|1|%s|%s' % (u(encodebytes(salt)), u(encodebytes(hmac))) - return hostkey.replace('\n', '') + hostkey = "|1|{}|{}".format(u(encodebytes(salt)), u(encodebytes(hmac))) + return hostkey.replace("\n", "") class InvalidHostKey(Exception): @@ -311,30 +341,32 @@ def from_line(cls, line, lineno=None): :param str line: a line from an OpenSSH known_hosts file """ - log = get_logger('paramiko.hostkeys') - fields = line.split(' ') + log = get_logger("paramiko.hostkeys") + fields = line.split(" ") if len(fields) < 3: # Bad number of fields - log.info("Not enough fields found in known_hosts in line %s (%r)" % - (lineno, line)) + msg = "Not enough fields found in known_hosts in line {} ({!r})" + log.info(msg.format(lineno, line)) return None fields = fields[:3] names, keytype, key = fields - names = names.split(',') + names = names.split(",") # Decide what kind of key we're looking at and create an object # to hold it accordingly. try: key = b(key) - if keytype == 'ssh-rsa': + if keytype == "ssh-rsa": key = RSAKey(data=decodebytes(key)) - elif keytype == 'ssh-dss': + elif keytype == "ssh-dss": key = DSSKey(data=decodebytes(key)) elif keytype in ECDSAKey.supported_key_format_identifiers(): key = ECDSAKey(data=decodebytes(key), validate_point=False) + elif keytype == "ssh-ed25519": + key = Ed25519Key(data=decodebytes(key)) else: - log.info("Unable to handle key of type %s" % (keytype,)) + log.info("Unable to handle key of type {}".format(keytype)) return None except binascii.Error as e: @@ -349,9 +381,12 @@ def to_line(self): included. 
""" if self.valid: - return '%s %s %s\n' % (','.join(self.hostnames), self.key.get_name(), - self.key.get_base64()) + return "{} {} {}\n".format( + ",".join(self.hostnames), + self.key.get_name(), + self.key.get_base64(), + ) return None def __repr__(self): - return '' % (self.hostnames, self.key) + return "".format(self.hostnames, self.key) diff --git a/paramiko/kex_curve25519.py b/paramiko/kex_curve25519.py new file mode 100644 index 000000000..3420fb4f2 --- /dev/null +++ b/paramiko/kex_curve25519.py @@ -0,0 +1,131 @@ +import binascii +import hashlib + +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.hazmat.primitives.asymmetric.x25519 import ( + X25519PrivateKey, + X25519PublicKey, +) + +from paramiko.message import Message +from paramiko.py3compat import byte_chr, long +from paramiko.ssh_exception import SSHException + + +_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32) +c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)] + + +class KexCurve25519(object): + hash_algo = hashlib.sha256 + + def __init__(self, transport): + self.transport = transport + self.key = None + + @classmethod + def is_available(cls): + try: + X25519PrivateKey.generate() + except UnsupportedAlgorithm: + return False + else: + return True + + def _perform_exchange(self, peer_key): + secret = self.key.exchange(peer_key) + if constant_time.bytes_eq(secret, b"\x00" * 32): + raise SSHException( + "peer's curve25519 public value has wrong order" + ) + return secret + + def start_kex(self): + self.key = X25519PrivateKey.generate() + if self.transport.server_mode: + self.transport._expect_packet(_MSG_KEXECDH_INIT) + return + + m = Message() + m.add_byte(c_MSG_KEXECDH_INIT) + m.add_string( + self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + ) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXECDH_REPLY) + + def parse_next(self, ptype, m): + if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT): + return self._parse_kexecdh_init(m) + elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY): + return self._parse_kexecdh_reply(m) + raise SSHException( + "KexCurve25519 asked to handle packet type {:d}".format(ptype) + ) + + def _parse_kexecdh_init(self, m): + peer_key_bytes = m.get_string() + peer_key = X25519PublicKey.from_public_bytes(peer_key_bytes) + K = self._perform_exchange(peer_key) + K = long(binascii.hexlify(K), 16) + # compute exchange hash + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + server_key_bytes = self.transport.get_server_key().asbytes() + exchange_key_bytes = self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + hm.add_string(server_key_bytes) + hm.add_string(peer_key_bytes) + hm.add_string(exchange_key_bytes) + hm.add_mpint(K) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # construct reply + m = Message() + m.add_byte(c_MSG_KEXECDH_REPLY) + m.add_string(server_key_bytes) + m.add_string(exchange_key_bytes) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() + + def _parse_kexecdh_reply(self, m): + peer_host_key_bytes = m.get_string() + peer_key_bytes = 
m.get_string() + sig = m.get_binary() + + peer_key = X25519PublicKey.from_public_bytes(peer_key_bytes) + + K = self._perform_exchange(peer_key) + K = long(binascii.hexlify(K), 16) + # compute exchange hash and verify signature + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(peer_host_key_bytes) + hm.add_string( + self.key.public_key().public_bytes( + serialization.Encoding.Raw, serialization.PublicFormat.Raw + ) + ) + hm.add_string(peer_key_bytes) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(peer_host_key_bytes, sig) + self.transport._activate_outbound() diff --git a/paramiko/kex_ecdh_nist.py b/paramiko/kex_ecdh_nist.py new file mode 100644 index 000000000..19de24313 --- /dev/null +++ b/paramiko/kex_ecdh_nist.py @@ -0,0 +1,151 @@ +""" +Ephemeral Elliptic Curve Diffie-Hellman (ECDH) key exchange +RFC 5656, Section 4 +""" + +from hashlib import sha256, sha384, sha512 +from paramiko.message import Message +from paramiko.py3compat import byte_chr, long +from paramiko.ssh_exception import SSHException +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives import serialization +from binascii import hexlify + +_MSG_KEXECDH_INIT, _MSG_KEXECDH_REPLY = range(30, 32) +c_MSG_KEXECDH_INIT, c_MSG_KEXECDH_REPLY = [byte_chr(c) for c in range(30, 32)] + + +class KexNistp256: + + name = "ecdh-sha2-nistp256" + hash_algo = sha256 + curve = ec.SECP256R1() + + def __init__(self, transport): + self.transport = transport + # private key, client public and server public keys + self.P = long(0) + self.Q_C = None + self.Q_S = None + + def start_kex(self): + self._generate_key_pair() + if self.transport.server_mode: + self.transport._expect_packet(_MSG_KEXECDH_INIT) + return + m = Message() + m.add_byte(c_MSG_KEXECDH_INIT) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + m.add_string( + self.Q_C.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + self.transport._send_message(m) + self.transport._expect_packet(_MSG_KEXECDH_REPLY) + + def parse_next(self, ptype, m): + if self.transport.server_mode and (ptype == _MSG_KEXECDH_INIT): + return self._parse_kexecdh_init(m) + elif not self.transport.server_mode and (ptype == _MSG_KEXECDH_REPLY): + return self._parse_kexecdh_reply(m) + raise SSHException( + "KexECDH asked to handle packet type {:d}".format(ptype) + ) + + def _generate_key_pair(self): + self.P = ec.generate_private_key(self.curve, default_backend()) + if self.transport.server_mode: + self.Q_S = self.P.public_key() + return + self.Q_C = self.P.public_key() + + def _parse_kexecdh_init(self, m): + Q_C_bytes = m.get_string() + self.Q_C = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, Q_C_bytes + ) + K_S = self.transport.get_server_key().asbytes() + K = self.P.exchange(ec.ECDH(), self.Q_C) + K = long(hexlify(K), 16) + # compute exchange hash + hm = Message() + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) + hm.add_string(K_S) + hm.add_string(Q_C_bytes) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + hm.add_string( + self.Q_S.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) 
+ ) + hm.add_mpint(long(K)) + H = self.hash_algo(hm.asbytes()).digest() + self.transport._set_K_H(K, H) + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) + # construct reply + m = Message() + m.add_byte(c_MSG_KEXECDH_REPLY) + m.add_string(K_S) + m.add_string( + self.Q_S.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + m.add_string(sig) + self.transport._send_message(m) + self.transport._activate_outbound() + + def _parse_kexecdh_reply(self, m): + K_S = m.get_string() + Q_S_bytes = m.get_string() + self.Q_S = ec.EllipticCurvePublicKey.from_encoded_point( + self.curve, Q_S_bytes + ) + sig = m.get_binary() + K = self.P.exchange(ec.ECDH(), self.Q_S) + K = long(hexlify(K), 16) + # compute exchange hash and verify signature + hm = Message() + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) + hm.add_string(K_S) + # SEC1: V2.0 2.3.3 Elliptic-Curve-Point-to-Octet-String Conversion + hm.add_string( + self.Q_C.public_bytes( + serialization.Encoding.X962, + serialization.PublicFormat.UncompressedPoint, + ) + ) + hm.add_string(Q_S_bytes) + hm.add_mpint(K) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) + self.transport._verify_key(K_S, sig) + self.transport._activate_outbound() + + +class KexNistp384(KexNistp256): + name = "ecdh-sha2-nistp384" + hash_algo = sha384 + curve = ec.SECP384R1() + + +class KexNistp521(KexNistp256): + name = "ecdh-sha2-nistp521" + hash_algo = sha512 + curve = ec.SECP521R1() diff --git a/paramiko/kex_gex.py b/paramiko/kex_gex.py index c980b6906..ab462e6d7 100644 --- a/paramiko/kex_gex.py +++ b/paramiko/kex_gex.py @@ -32,15 +32,26 @@ from paramiko.ssh_exception import SSHException -_MSG_KEXDH_GEX_REQUEST_OLD, _MSG_KEXDH_GEX_GROUP, _MSG_KEXDH_GEX_INIT, \ - _MSG_KEXDH_GEX_REPLY, _MSG_KEXDH_GEX_REQUEST = range(30, 35) -c_MSG_KEXDH_GEX_REQUEST_OLD, c_MSG_KEXDH_GEX_GROUP, c_MSG_KEXDH_GEX_INIT, \ - c_MSG_KEXDH_GEX_REPLY, c_MSG_KEXDH_GEX_REQUEST = [byte_chr(c) for c in range(30, 35)] +( + _MSG_KEXDH_GEX_REQUEST_OLD, + _MSG_KEXDH_GEX_GROUP, + _MSG_KEXDH_GEX_INIT, + _MSG_KEXDH_GEX_REPLY, + _MSG_KEXDH_GEX_REQUEST, +) = range(30, 35) +( + c_MSG_KEXDH_GEX_REQUEST_OLD, + c_MSG_KEXDH_GEX_GROUP, + c_MSG_KEXDH_GEX_INIT, + c_MSG_KEXDH_GEX_REPLY, + c_MSG_KEXDH_GEX_REQUEST, +) = [byte_chr(c) for c in range(30, 35)] -class KexGex (object): - name = 'diffie-hellman-group-exchange-sha1' +class KexGex(object): + + name = "diffie-hellman-group-exchange-sha1" min_bits = 1024 max_bits = 8192 preferred_bits = 2048 @@ -58,7 +69,9 @@ def __init__(self, transport): def start_kex(self, _test_old_style=False): if self.transport.server_mode: - self.transport._expect_packet(_MSG_KEXDH_GEX_REQUEST, _MSG_KEXDH_GEX_REQUEST_OLD) + self.transport._expect_packet( + _MSG_KEXDH_GEX_REQUEST, _MSG_KEXDH_GEX_REQUEST_OLD + ) return # request a bit range: we accept (min_bits) to (max_bits), but prefer # (preferred_bits). according to the spec, we shouldn't pull the @@ -88,9 +101,10 @@ def parse_next(self, ptype, m): return self._parse_kexdh_gex_reply(m) elif ptype == _MSG_KEXDH_GEX_REQUEST_OLD: return self._parse_kexdh_gex_request_old(m) - raise SSHException('KexGex %s asked to handle packet type %d' % self.name, ptype) + msg = "KexGex {} asked to handle packet type {:d}" + raise SSHException(msg.format(self.name, ptype)) - ### internals... + # ...internals... 
def _generate_x(self): # generate an "x" (1 < x < (p-1)/2). @@ -133,8 +147,13 @@ def _parse_kexdh_gex_request(self, m): # generate prime pack = self.transport._get_modulus_pack() if pack is None: - raise SSHException('Can\'t do server-side gex with no modulus pack') - self.transport._log(DEBUG, 'Picking p (%d <= %d <= %d bits)' % (minbits, preferredbits, maxbits)) + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, + "Picking p ({} <= {} <= {} bits)".format( + minbits, preferredbits, maxbits + ), + ) self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits) m = Message() m.add_byte(c_MSG_KEXDH_GEX_GROUP) @@ -144,7 +163,8 @@ def _parse_kexdh_gex_request(self, m): self.transport._expect_packet(_MSG_KEXDH_GEX_INIT) def _parse_kexdh_gex_request_old(self, m): - # same as above, but without min_bits or max_bits (used by older clients like putty) + # same as above, but without min_bits or max_bits (used by older + # clients like putty) self.preferred_bits = m.get_int() # smoosh the user's preferred size into our own limits if self.preferred_bits > self.max_bits: @@ -154,9 +174,13 @@ def _parse_kexdh_gex_request_old(self, m): # generate prime pack = self.transport._get_modulus_pack() if pack is None: - raise SSHException('Can\'t do server-side gex with no modulus pack') - self.transport._log(DEBUG, 'Picking p (~ %d bits)' % (self.preferred_bits,)) - self.g, self.p = pack.get_modulus(self.min_bits, self.preferred_bits, self.max_bits) + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, "Picking p (~ {} bits)".format(self.preferred_bits) + ) + self.g, self.p = pack.get_modulus( + self.min_bits, self.preferred_bits, self.max_bits + ) m = Message() m.add_byte(c_MSG_KEXDH_GEX_GROUP) m.add_mpint(self.p) @@ -171,8 +195,11 @@ def _parse_kexdh_gex_group(self, m): # reject if p's bit length < 1024 or > 8192 bitlen = util.bit_length(self.p) if (bitlen < 1024) or (bitlen > 8192): - raise SSHException('Server-generated gex p (don\'t ask) is out of range (%d bits)' % bitlen) - self.transport._log(DEBUG, 'Got server p (%d bits)' % bitlen) + raise SSHException( + "Server-generated gex p (don't ask) is out of range " + "({} bits)".format(bitlen) + ) + self.transport._log(DEBUG, "Got server p ({} bits)".format(bitlen)) self._generate_x() # now compute e = g^x mod p self.e = pow(self.g, self.x, self.p) @@ -190,11 +217,16 @@ def _parse_kexdh_gex_init(self, m): self.f = pow(self.g, self.x, self.p) K = pow(self.e, self.x, self.p) key = self.transport.get_server_key().asbytes() - # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa hm = Message() - hm.add(self.transport.remote_version, self.transport.local_version, - self.transport.remote_kex_init, self.transport.local_kex_init, - key) + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + key, + ) if not self.old_style: hm.add_int(self.min_bits) hm.add_int(self.preferred_bits) @@ -208,7 +240,9 @@ def _parse_kexdh_gex_init(self, m): H = self.hash_algo(hm.asbytes()).digest() self.transport._set_K_H(K, H) # sign it - sig = self.transport.get_server_key().sign_ssh_data(H) + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) # send reply m = Message() 
m.add_byte(c_MSG_KEXDH_GEX_REPLY) @@ -225,11 +259,16 @@ def _parse_kexdh_gex_reply(self, m): if (self.f < 1) or (self.f > self.p - 1): raise SSHException('Server kex "f" is out of range') K = pow(self.f, self.x, self.p) - # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa hm = Message() - hm.add(self.transport.local_version, self.transport.remote_version, - self.transport.local_kex_init, self.transport.remote_kex_init, - host_key) + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + host_key, + ) if not self.old_style: hm.add_int(self.min_bits) hm.add_int(self.preferred_bits) @@ -244,6 +283,7 @@ def _parse_kexdh_gex_reply(self, m): self.transport._verify_key(host_key, sig) self.transport._activate_outbound() + class KexGexSHA256(KexGex): - name = 'diffie-hellman-group-exchange-sha256' + name = "diffie-hellman-group-exchange-sha256" hash_algo = sha256 diff --git a/paramiko/kex_group1.py b/paramiko/kex_group1.py index 9eee066c7..6d548b013 100644 --- a/paramiko/kex_group1.py +++ b/paramiko/kex_group1.py @@ -41,10 +41,10 @@ class KexGroup1(object): # draft-ietf-secsh-transport-09.txt, page 17 - P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa G = 2 - name = 'diffie-hellman-group1-sha1' + name = "diffie-hellman-group1-sha1" hash_algo = sha1 def __init__(self, transport): @@ -73,21 +73,25 @@ def parse_next(self, ptype, m): return self._parse_kexdh_init(m) elif not self.transport.server_mode and (ptype == _MSG_KEXDH_REPLY): return self._parse_kexdh_reply(m) - raise SSHException('KexGroup1 asked to handle packet type %d' % ptype) + msg = "KexGroup1 asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) - ### internals... + # ...internals... def _generate_x(self): # generate an "x" (1 < x < q), where q is (p-1)/2. - # p is a 128-byte (1024-bit) number, where the first 64 bits are 1. + # p is a 128-byte (1024-bit) number, where the first 64 bits are 1. # therefore q can be approximated as a 2^1023. we drop the subset of - # potential x where the first 63 bits are 1, because some of those will be - # larger than q (but this is a tiny tiny subset of potential x). + # potential x where the first 63 bits are 1, because some of those + # will be larger than q (but this is a tiny tiny subset of + # potential x). 
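+        # (byte_mask() clears the top bit of the first random byte, so x
+        # always stays below 2**1023; the explicit check below only rejects
+        # the two extreme leading-byte patterns.)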
while 1: x_bytes = os.urandom(128) x_bytes = byte_mask(x_bytes[0], 0x7f) + x_bytes[1:] - if (x_bytes[:8] != b7fffffffffffffff and - x_bytes[:8] != b0000000000000000): + if ( + x_bytes[:8] != b7fffffffffffffff + and x_bytes[:8] != b0000000000000000 + ): break self.x = util.inflate_long(x_bytes) @@ -99,15 +103,20 @@ def _parse_kexdh_reply(self, m): raise SSHException('Server kex "f" is out of range') sig = m.get_binary() K = pow(self.f, self.x, self.P) - # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || e || f || K) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) hm = Message() - hm.add(self.transport.local_version, self.transport.remote_version, - self.transport.local_kex_init, self.transport.remote_kex_init) + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) hm.add_string(host_key) hm.add_mpint(self.e) hm.add_mpint(self.f) hm.add_mpint(K) - self.transport._set_K_H(K, sha1(hm.asbytes()).digest()) + self.transport._set_K_H(K, self.hash_algo(hm.asbytes()).digest()) self.transport._verify_key(host_key, sig) self.transport._activate_outbound() @@ -118,18 +127,25 @@ def _parse_kexdh_init(self, m): raise SSHException('Client kex "e" is out of range') K = pow(self.e, self.x, self.P) key = self.transport.get_server_key().asbytes() - # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || e || f || K) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) hm = Message() - hm.add(self.transport.remote_version, self.transport.local_version, - self.transport.remote_kex_init, self.transport.local_kex_init) + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) hm.add_string(key) hm.add_mpint(self.e) hm.add_mpint(self.f) hm.add_mpint(K) - H = sha1(hm.asbytes()).digest() + H = self.hash_algo(hm.asbytes()).digest() self.transport._set_K_H(K, H) # sign it - sig = self.transport.get_server_key().sign_ssh_data(H) + sig = self.transport.get_server_key().sign_ssh_data( + H, self.transport.host_key_type + ) # send reply m = Message() m.add_byte(c_MSG_KEXDH_REPLY) diff --git a/paramiko/kex_group14.py b/paramiko/kex_group14.py index 9f7dd2162..a620c1a38 100644 --- a/paramiko/kex_group14.py +++ b/paramiko/kex_group14.py @@ -22,14 +22,19 @@ """ from paramiko.kex_group1 import KexGroup1 -from hashlib import sha1 +from hashlib import sha1, sha256 class KexGroup14(KexGroup1): # http://tools.ietf.org/html/rfc3526#section-3 - P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF + P = 
0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF # noqa G = 2 - name = 'diffie-hellman-group14-sha1' + name = "diffie-hellman-group14-sha1" hash_algo = sha1 + + +class KexGroup14SHA256(KexGroup14): + name = "diffie-hellman-group14-sha256" + hash_algo = sha256 diff --git a/paramiko/kex_group16.py b/paramiko/kex_group16.py new file mode 100644 index 000000000..15b0acfe8 --- /dev/null +++ b/paramiko/kex_group16.py @@ -0,0 +1,35 @@ +# Copyright (C) 2019 Edgar Sousa +# +# This file is part of paramiko. +# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distrubuted in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. + +""" +Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of +4096 bit key halves, using a known "p" prime and "g" generator. 
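+
+The exchange itself is plain finite-field Diffie-Hellman (sketch; x and y
+stand for the client's and server's random secrets)::
+
+    e = pow(G, x, P)   # sent by the client
+    f = pow(G, y, P)   # sent by the server
+    K = pow(f, x, P)   # == pow(e, y, P), the shared secret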
+""" + +from paramiko.kex_group1 import KexGroup1 +from hashlib import sha512 + + +class KexGroup16SHA512(KexGroup1): + name = "diffie-hellman-group16-sha512" + # http://tools.ietf.org/html/rfc3526#section-5 + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF # noqa + G = 2 + + name = "diffie-hellman-group16-sha512" + hash_algo = sha512 diff --git a/paramiko/kex_gss.py b/paramiko/kex_gss.py index 69969f8a8..f83a2dc4d 100644 --- a/paramiko/kex_gss.py +++ b/paramiko/kex_gss.py @@ -40,19 +40,31 @@ import os from hashlib import sha1 -from paramiko.common import * +from paramiko.common import DEBUG, max_byte, zero_byte from paramiko import util from paramiko.message import Message -from paramiko.py3compat import byte_chr, long, byte_mask, byte_ord +from paramiko.py3compat import byte_chr, byte_mask, byte_ord from paramiko.ssh_exception import SSHException -MSG_KEXGSS_INIT, MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_HOSTKEY,\ -MSG_KEXGSS_ERROR = range(30, 35) -MSG_KEXGSS_GROUPREQ, MSG_KEXGSS_GROUP = range(40, 42) -c_MSG_KEXGSS_INIT, c_MSG_KEXGSS_CONTINUE, c_MSG_KEXGSS_COMPLETE,\ -c_MSG_KEXGSS_HOSTKEY, c_MSG_KEXGSS_ERROR = [byte_chr(c) for c in range(30, 35)] -c_MSG_KEXGSS_GROUPREQ, c_MSG_KEXGSS_GROUP = [byte_chr(c) for c in range(40, 42)] +( + MSG_KEXGSS_INIT, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_ERROR, +) = range(30, 35) +(MSG_KEXGSS_GROUPREQ, MSG_KEXGSS_GROUP) = range(40, 42) +( + c_MSG_KEXGSS_INIT, + c_MSG_KEXGSS_CONTINUE, + c_MSG_KEXGSS_COMPLETE, + c_MSG_KEXGSS_HOSTKEY, + c_MSG_KEXGSS_ERROR, +) = [byte_chr(c) for c in range(30, 35)] +(c_MSG_KEXGSS_GROUPREQ, c_MSG_KEXGSS_GROUP) = [ + byte_chr(c) for c in range(40, 42) +] class KexGSSGroup1(object): @@ -60,11 +72,12 @@ class KexGSSGroup1(object): GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange as defined in `RFC 4462 Section 2 `_ """ + # draft-ietf-secsh-transport-09.txt, page 17 - P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa G = 2 - b7fffffffffffffff = byte_chr(0x7f) + max_byte * 7 - b0000000000000000 = zero_byte * 8 + b7fffffffffffffff = 
byte_chr(0x7f) + max_byte * 7 # noqa + b0000000000000000 = zero_byte * 8 # noqa NAME = "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==" def __init__(self, transport): @@ -79,7 +92,6 @@ def start_kex(self): """ Start the GSS-API / SSPI Authenticated Diffie-Hellman Key Exchange. """ - self.transport.gss_kex_used = True self._generate_x() if self.transport.server_mode: # compute f = g^x mod p, but don't send it yet @@ -95,16 +107,18 @@ def start_kex(self): m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host)) m.add_mpint(self.e) self.transport._send_message(m) - self.transport._expect_packet(MSG_KEXGSS_HOSTKEY, - MSG_KEXGSS_CONTINUE, - MSG_KEXGSS_COMPLETE, - MSG_KEXGSS_ERROR) + self.transport._expect_packet( + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_ERROR, + ) def parse_next(self, ptype, m): """ Parse the next packet. - :param char ptype: The type of the incomming packet + :param ptype: The (string) type of the incoming packet :param `.Message` m: The paket content """ if self.transport.server_mode and (ptype == MSG_KEXGSS_INIT): @@ -117,8 +131,8 @@ def parse_next(self, ptype, m): return self._parse_kexgss_complete(m) elif ptype == MSG_KEXGSS_ERROR: return self._parse_kexgss_error(m) - raise SSHException('GSS KexGroup1 asked to handle packet type %d' - % ptype) + msg = "GSS KexGroup1 asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) # ## internals... @@ -127,14 +141,14 @@ def _generate_x(self): generate an "x" (1 < x < q), where q is (p-1)/2. p is a 128-byte (1024-bit) number, where the first 64 bits are 1. therefore q can be approximated as a 2^1023. we drop the subset of - potential x where the first 63 bits are 1, because some of those will be - larger than q (but this is a tiny tiny subset of potential x). + potential x where the first 63 bits are 1, because some of those will + be larger than q (but this is a tiny tiny subset of potential x). """ while 1: x_bytes = os.urandom(128) x_bytes = byte_mask(x_bytes[0], 0x7f) + x_bytes[1:] - if (x_bytes[:8] != self.b7fffffffffffffff) and \ - (x_bytes[:8] != self.b0000000000000000): + first = x_bytes[:8] + if first not in (self.b7fffffffffffffff, self.b0000000000000000): break self.x = util.inflate_long(x_bytes) @@ -149,25 +163,28 @@ def _parse_kexgss_hostkey(self, m): self.transport.host_key = host_key sig = m.get_string() self.transport._verify_key(host_key, sig) - self.transport._expect_packet(MSG_KEXGSS_CONTINUE, - MSG_KEXGSS_COMPLETE) + self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE) def _parse_kexgss_continue(self, m): """ Parse the SSH2_MSG_KEXGSS_CONTINUE message. - :param `.Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE message + :param `.Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE + message """ if not self.transport.server_mode: srv_token = m.get_string() m = Message() m.add_byte(c_MSG_KEXGSS_CONTINUE) - m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host, - recv_token=srv_token)) + m.add_string( + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + ) self.transport.send_message(m) - self.transport._expect_packet(MSG_KEXGSS_CONTINUE, - MSG_KEXGSS_COMPLETE, - MSG_KEXGSS_ERROR) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) else: pass @@ -175,7 +192,8 @@ def _parse_kexgss_complete(self, m): """ Parse the SSH2_MSG_KEXGSS_COMPLETE message (client mode). 
- :param `.Message` m: The content of the SSH2_MSG_KEXGSS_COMPLETE message + :param `.Message` m: The content of the + SSH2_MSG_KEXGSS_COMPLETE message """ # client mode if self.transport.host_key is None: @@ -190,23 +208,29 @@ def _parse_kexgss_complete(self, m): if bool: srv_token = m.get_string() K = pow(self.f, self.x, self.P) - # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || e || f || K) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) hm = Message() - hm.add(self.transport.local_version, self.transport.remote_version, - self.transport.local_kex_init, self.transport.remote_kex_init) + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + ) hm.add_string(self.transport.host_key.__str__()) hm.add_mpint(self.e) hm.add_mpint(self.f) hm.add_mpint(K) - self.transport._set_K_H(K, sha1(str(hm)).digest()) + H = sha1(str(hm)).digest() + self.transport._set_K_H(K, H) if srv_token is not None: - self.kexgss.ssh_init_sec_context(target=self.gss_host, - recv_token=srv_token) - self.kexgss.ssh_check_mic(mic_token, - self.transport.session_id) + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + self.kexgss.ssh_check_mic(mic_token, H) else: - self.kexgss.ssh_check_mic(mic_token, - self.transport.session_id) + self.kexgss.ssh_check_mic(mic_token, H) + self.transport.gss_kex_used = True self.transport._activate_outbound() def _parse_kexgss_init(self, m): @@ -223,22 +247,29 @@ def _parse_kexgss_init(self, m): K = pow(self.e, self.x, self.P) self.transport.host_key = NullHostKey() key = self.transport.host_key.__str__() - # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || e || f || K) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || e || f || K) hm = Message() - hm.add(self.transport.remote_version, self.transport.local_version, - self.transport.remote_kex_init, self.transport.local_kex_init) + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + ) hm.add_string(key) hm.add_mpint(self.e) hm.add_mpint(self.f) hm.add_mpint(K) H = sha1(hm.asbytes()).digest() self.transport._set_K_H(K, H) - srv_token = self.kexgss.ssh_accept_sec_context(self.gss_host, - client_token) + srv_token = self.kexgss.ssh_accept_sec_context( + self.gss_host, client_token + ) m = Message() if self.kexgss._gss_srv_ctxt_status: - mic_token = self.kexgss.ssh_get_mic(self.transport.session_id, - gss_kex=True) + mic_token = self.kexgss.ssh_get_mic( + self.transport.session_id, gss_kex=True + ) m.add_byte(c_MSG_KEXGSS_COMPLETE) m.add_mpint(self.f) m.add_string(mic_token) @@ -248,14 +279,15 @@ def _parse_kexgss_init(self, m): else: m.add_boolean(False) self.transport._send_message(m) + self.transport.gss_kex_used = True self.transport._activate_outbound() else: m.add_byte(c_MSG_KEXGSS_CONTINUE) m.add_string(srv_token) self.transport._send_message(m) - self.transport._expect_packet(MSG_KEXGSS_CONTINUE, - MSG_KEXGSS_COMPLETE, - MSG_KEXGSS_ERROR) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) def _parse_kexgss_error(self, m): """ @@ -271,11 +303,16 @@ def _parse_kexgss_error(self, m): maj_status = m.get_int() min_status = m.get_int() err_msg = m.get_string() - lang_tag = m.get_string() # we don't care about the language! 
- raise SSHException("GSS-API Error:\nMajor Status: %s\nMinor Status: %s\ - \nError Message: %s\n") % (str(maj_status), - str(min_status), - err_msg) + m.get_string() # we don't care about the language! + raise SSHException( + """GSS-API Error: +Major Status: {} +Minor Status: {} +Error Message: {} +""".format( + maj_status, min_status, err_msg + ) + ) class KexGSSGroup14(KexGSSGroup1): @@ -284,7 +321,8 @@ class KexGSSGroup14(KexGSSGroup1): in `RFC 4462 Section 2 `_ """ - P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF + + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF # noqa G = 2 NAME = "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==" @@ -294,6 +332,7 @@ class KexGSSGex(object): GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange as defined in `RFC 4462 Section 2 `_ """ + NAME = "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==" min_bits = 1024 max_bits = 8192 @@ -315,7 +354,6 @@ def start_kex(self): """ Start the GSS-API / SSPI Authenticated Diffie-Hellman Group Exchange """ - self.transport.gss_kex_used = True if self.transport.server_mode: self.transport._expect_packet(MSG_KEXGSS_GROUPREQ) return @@ -335,7 +373,7 @@ def parse_next(self, ptype, m): """ Parse the next packet. - :param char ptype: The type of the incomming packet + :param ptype: The (string) type of the incoming packet :param `.Message` m: The paket content """ if ptype == MSG_KEXGSS_GROUPREQ: @@ -352,7 +390,8 @@ def parse_next(self, ptype, m): return self._parse_kexgss_complete(m) elif ptype == MSG_KEXGSS_ERROR: return self._parse_kexgss_error(m) - raise SSHException('KexGex asked to handle packet type %d' % ptype) + msg = "KexGex asked to handle packet type {:d}" + raise SSHException(msg.format(ptype)) # ## internals... @@ -378,7 +417,8 @@ def _parse_kexgss_groupreq(self, m): """ Parse the SSH2_MSG_KEXGSS_GROUPREQ message (server mode). 
- :param `.Message` m: The content of the SSH2_MSG_KEXGSS_GROUPREQ message + :param `.Message` m: The content of the + SSH2_MSG_KEXGSS_GROUPREQ message """ minbits = m.get_int() preferredbits = m.get_int() @@ -402,8 +442,13 @@ def _parse_kexgss_groupreq(self, m): # generate prime pack = self.transport._get_modulus_pack() if pack is None: - raise SSHException('Can\'t do server-side gex with no modulus pack') - self.transport._log(DEBUG, 'Picking p (%d <= %d <= %d bits)' % (minbits, preferredbits, maxbits)) + raise SSHException("Can't do server-side gex with no modulus pack") + self.transport._log( + DEBUG, # noqa + "Picking p ({} <= {} <= {} bits)".format( + minbits, preferredbits, maxbits + ), + ) self.g, self.p = pack.get_modulus(minbits, preferredbits, maxbits) m = Message() m.add_byte(c_MSG_KEXGSS_GROUP) @@ -423,8 +468,13 @@ def _parse_kexgss_group(self, m): # reject if p's bit length < 1024 or > 8192 bitlen = util.bit_length(self.p) if (bitlen < 1024) or (bitlen > 8192): - raise SSHException('Server-generated gex p (don\'t ask) is out of range (%d bits)' % bitlen) - self.transport._log(DEBUG, 'Got server p (%d bits)' % bitlen) + raise SSHException( + "Server-generated gex p (don't ask) is out of range " + "({} bits)".format(bitlen) + ) + self.transport._log( + DEBUG, "Got server p ({} bits)".format(bitlen) + ) # noqa self._generate_x() # now compute e = g^x mod p self.e = pow(self.g, self.x, self.p) @@ -433,10 +483,12 @@ def _parse_kexgss_group(self, m): m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host)) m.add_mpint(self.e) self.transport._send_message(m) - self.transport._expect_packet(MSG_KEXGSS_HOSTKEY, - MSG_KEXGSS_CONTINUE, - MSG_KEXGSS_COMPLETE, - MSG_KEXGSS_ERROR) + self.transport._expect_packet( + MSG_KEXGSS_HOSTKEY, + MSG_KEXGSS_CONTINUE, + MSG_KEXGSS_COMPLETE, + MSG_KEXGSS_ERROR, + ) def _parse_kexgss_gex_init(self, m): """ @@ -453,11 +505,16 @@ def _parse_kexgss_gex_init(self, m): K = pow(self.e, self.x, self.p) self.transport.host_key = NullHostKey() key = self.transport.host_key.__str__() - # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa hm = Message() - hm.add(self.transport.remote_version, self.transport.local_version, - self.transport.remote_kex_init, self.transport.local_kex_init, - key) + hm.add( + self.transport.remote_version, + self.transport.local_version, + self.transport.remote_kex_init, + self.transport.local_kex_init, + key, + ) hm.add_int(self.min_bits) hm.add_int(self.preferred_bits) hm.add_int(self.max_bits) @@ -468,12 +525,14 @@ def _parse_kexgss_gex_init(self, m): hm.add_mpint(K) H = sha1(hm.asbytes()).digest() self.transport._set_K_H(K, H) - srv_token = self.kexgss.ssh_accept_sec_context(self.gss_host, - client_token) + srv_token = self.kexgss.ssh_accept_sec_context( + self.gss_host, client_token + ) m = Message() if self.kexgss._gss_srv_ctxt_status: - mic_token = self.kexgss.ssh_get_mic(self.transport.session_id, - gss_kex=True) + mic_token = self.kexgss.ssh_get_mic( + self.transport.session_id, gss_kex=True + ) m.add_byte(c_MSG_KEXGSS_COMPLETE) m.add_mpint(self.f) m.add_string(mic_token) @@ -483,14 +542,15 @@ def _parse_kexgss_gex_init(self, m): else: m.add_boolean(False) self.transport._send_message(m) + self.transport.gss_kex_used = True self.transport._activate_outbound() else: m.add_byte(c_MSG_KEXGSS_CONTINUE) m.add_string(srv_token) 
self.transport._send_message(m) - self.transport._expect_packet(MSG_KEXGSS_CONTINUE, - MSG_KEXGSS_COMPLETE, - MSG_KEXGSS_ERROR) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) def _parse_kexgss_hostkey(self, m): """ @@ -503,8 +563,7 @@ def _parse_kexgss_hostkey(self, m): self.transport.host_key = host_key sig = m.get_string() self.transport._verify_key(host_key, sig) - self.transport._expect_packet(MSG_KEXGSS_CONTINUE, - MSG_KEXGSS_COMPLETE) + self.transport._expect_packet(MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE) def _parse_kexgss_continue(self, m): """ @@ -516,12 +575,15 @@ def _parse_kexgss_continue(self, m): srv_token = m.get_string() m = Message() m.add_byte(c_MSG_KEXGSS_CONTINUE) - m.add_string(self.kexgss.ssh_init_sec_context(target=self.gss_host, - recv_token=srv_token)) + m.add_string( + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + ) self.transport.send_message(m) - self.transport._expect_packet(MSG_KEXGSS_CONTINUE, - MSG_KEXGSS_COMPLETE, - MSG_KEXGSS_ERROR) + self.transport._expect_packet( + MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR + ) else: pass @@ -543,11 +605,16 @@ def _parse_kexgss_complete(self, m): if (self.f < 1) or (self.f > self.p - 1): raise SSHException('Server kex "f" is out of range') K = pow(self.f, self.x, self.p) - # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) + # okay, build up the hash H of + # (V_C || V_S || I_C || I_S || K_S || min || n || max || p || g || e || f || K) # noqa hm = Message() - hm.add(self.transport.local_version, self.transport.remote_version, - self.transport.local_kex_init, self.transport.remote_kex_init, - self.transport.host_key.__str__()) + hm.add( + self.transport.local_version, + self.transport.remote_version, + self.transport.local_kex_init, + self.transport.remote_kex_init, + self.transport.host_key.__str__(), + ) if not self.old_style: hm.add_int(self.min_bits) hm.add_int(self.preferred_bits) @@ -561,13 +628,13 @@ def _parse_kexgss_complete(self, m): H = sha1(hm.asbytes()).digest() self.transport._set_K_H(K, H) if srv_token is not None: - self.kexgss.ssh_init_sec_context(target=self.gss_host, - recv_token=srv_token) - self.kexgss.ssh_check_mic(mic_token, - self.transport.session_id) + self.kexgss.ssh_init_sec_context( + target=self.gss_host, recv_token=srv_token + ) + self.kexgss.ssh_check_mic(mic_token, H) else: - self.kexgss.ssh_check_mic(mic_token, - self.transport.session_id) + self.kexgss.ssh_check_mic(mic_token, H) + self.transport.gss_kex_used = True self.transport._activate_outbound() def _parse_kexgss_error(self, m): @@ -584,11 +651,16 @@ def _parse_kexgss_error(self, m): maj_status = m.get_int() min_status = m.get_int() err_msg = m.get_string() - lang_tag = m.get_string() # we don't care about the language! - raise SSHException("GSS-API Error:\nMajor Status: %s\nMinor Status: %s\ - \nError Message: %s\n") % (str(maj_status), - str(min_status), - err_msg) + m.get_string() # we don't care about the language (lang_tag)! 
+ raise SSHException( + """GSS-API Error: +Major Status: {} +Minor Status: {} +Error Message: {} +""".format( + maj_status, min_status, err_msg + ) + ) class NullHostKey(object): @@ -597,6 +669,7 @@ class NullHostKey(object): in `RFC 4462 Section 5 `_ """ + def __init__(self): self.key = "" diff --git a/paramiko/message.py b/paramiko/message.py index bf4c6b95d..9771cfbc9 100644 --- a/paramiko/message.py +++ b/paramiko/message.py @@ -27,12 +27,12 @@ from paramiko.py3compat import long, BytesIO, u, integer_types -class Message (object): +class Message(object): """ An SSH2 message is a stream of bytes that encodes some combination of strings, integers, bools, and infinite-precision integers (known in Python as longs). This class builds or breaks down such a byte stream. - + Normally you don't need to deal with anything this low-level, but it's exposed for people implementing custom extensions, or features that paramiko doesn't support yet. @@ -63,7 +63,7 @@ def __repr__(self): """ Returns a string representation of this object, for debugging. """ - return 'paramiko.Message(' + repr(self.packet.getvalue()) + ')' + return "paramiko.Message(" + repr(self.packet.getvalue()) + ")" def asbytes(self): """ @@ -139,16 +139,13 @@ def get_adaptive_int(self): if byte == max_byte: return util.inflate_long(self.get_binary()) byte += self.get_bytes(3) - return struct.unpack('>I', byte)[0] + return struct.unpack(">I", byte)[0] def get_int(self): """ Fetch an int from the stream. - - @return: a 32-bit unsigned integer. - @rtype: int """ - return struct.unpack('>I', self.get_bytes(4))[0] + return struct.unpack(">I", self.get_bytes(4))[0] def get_int64(self): """ @@ -156,7 +153,7 @@ def get_int64(self): :return: a 64-bit unsigned integer (`long`). """ - return struct.unpack('>Q', self.get_bytes(8))[0] + return struct.unpack(">Q", self.get_bytes(8))[0] def get_mpint(self): """ @@ -176,39 +173,30 @@ def get_string(self): def get_text(self): """ - Fetch a string from the stream. This could be a byte string and may - contain unprintable characters. (It's not unheard of for a string to - contain another byte-stream Message.) - - @return: a string. - @rtype: string + Fetch a Unicode string from the stream. """ - return u(self.get_bytes(self.get_int())) - #return self.get_bytes(self.get_size()) + return u(self.get_string()) def get_binary(self): """ Fetch a string from the stream. This could be a byte string and may contain unprintable characters. (It's not unheard of for a string to contain another byte-stream Message.) - - @return: a string. - @rtype: string """ return self.get_bytes(self.get_int()) def get_list(self): """ - Fetch a `list` of `strings ` from the stream. - + Fetch a list of `strings ` from the stream. + These are trivially encoded as comma-separated values in a string. """ - return self.get_text().split(',') + return self.get_text().split(",") def add_bytes(self, b): """ Write bytes to the stream, without any formatting. - + :param str b: bytes to add """ self.packet.write(b) @@ -217,7 +205,7 @@ def add_bytes(self, b): def add_byte(self, b): """ Write a single byte to the stream, without any formatting. - + :param str b: byte to add """ self.packet.write(b) @@ -226,7 +214,7 @@ def add_byte(self, b): def add_boolean(self, b): """ Add a boolean value to the stream. - + :param bool b: boolean value to add """ if b: @@ -234,27 +222,27 @@ def add_boolean(self, b): else: self.packet.write(zero_byte) return self - + def add_int(self, n): """ Add an integer to the stream. 
- + :param int n: integer to add """ - self.packet.write(struct.pack('>I', n)) + self.packet.write(struct.pack(">I", n)) return self - + def add_adaptive_int(self, n): """ Add an integer to the stream. - + :param int n: integer to add """ if n >= Message.big_int: self.packet.write(max_byte) self.add_string(util.deflate_long(n)) else: - self.packet.write(struct.pack('>I', n)) + self.packet.write(struct.pack(">I", n)) return self def add_int64(self, n): @@ -263,14 +251,14 @@ def add_int64(self, n): :param long n: long int to add """ - self.packet.write(struct.pack('>Q', n)) + self.packet.write(struct.pack(">Q", n)) return self def add_mpint(self, z): """ Add a long int to the stream, encoded as an infinite-precision integer. This method only works on positive numbers. - + :param long z: long int to add """ self.add_string(util.deflate_long(z)) @@ -279,7 +267,7 @@ def add_mpint(self, z): def add_string(self, s): """ Add a string to the stream. - + :param str s: string to add """ s = asbytes(s) @@ -287,17 +275,17 @@ def add_string(self, s): self.packet.write(s) return self - def add_list(self, l): + def add_list(self, l): # noqa: E741 """ Add a list of strings to the stream. They are encoded identically to a single string of values separated by commas. (Yes, really, that's how SSH2 does it.) - - :param list l: list of strings to add + + :param l: list of strings to add """ - self.add_string(','.join(l)) + self.add_string(",".join(l)) return self - + def _add(self, i): if type(i) is bool: return self.add_boolean(i) @@ -315,7 +303,7 @@ def add(self, *seq): .. warning:: Longs are encoded non-deterministically. Don't use this method. - + :param seq: the sequence of items """ for item in seq: diff --git a/paramiko/packet.py b/paramiko/packet.py index c943fe3c3..12663168d 100644 --- a/paramiko/packet.py +++ b/paramiko/packet.py @@ -29,8 +29,15 @@ from hmac import HMAC from paramiko import util -from paramiko.common import linefeed_byte, cr_byte_value, asbytes, MSG_NAMES, \ - DEBUG, xffffffff, zero_byte +from paramiko.common import ( + linefeed_byte, + cr_byte_value, + asbytes, + MSG_NAMES, + DEBUG, + xffffffff, + zero_byte, +) from paramiko.py3compat import u, byte_ord from paramiko.ssh_exception import SSHException, ProxyCommandFailure from paramiko.message import Message @@ -40,11 +47,22 @@ def compute_hmac(key, message, digest_class): return HMAC(key, message, digest_class).digest() -class NeedRekeyException (Exception): +class NeedRekeyException(Exception): + """ + Exception indicating a rekey is needed. + """ + pass -class Packetizer (object): +def first_arg(e): + arg = None + if type(e.args) is tuple and len(e.args) > 0: + arg = e.args[0] + return arg + + +class Packetizer(object): """ Implementation of the base SSH packet protocol. 
""" @@ -54,8 +72,11 @@ class Packetizer (object): REKEY_PACKETS = pow(2, 29) REKEY_BYTES = pow(2, 29) - REKEY_PACKETS_OVERFLOW_MAX = pow(2, 29) # Allow receiving this many packets after a re-key request before terminating - REKEY_BYTES_OVERFLOW_MAX = pow(2, 29) # Allow receiving this many bytes after a re-key request before terminating + # Allow receiving this many packets after a re-key request before + # terminating + REKEY_PACKETS_OVERFLOW_MAX = pow(2, 29) + # Allow receiving this many bytes after a re-key request before terminating + REKEY_BYTES_OVERFLOW_MAX = pow(2, 29) def __init__(self, socket): self.__socket = socket @@ -90,6 +111,8 @@ def __init__(self, socket): self.__compress_engine_in = None self.__sequence_number_out = 0 self.__sequence_number_in = 0 + self.__etm_out = False + self.__etm_in = False # lock around outbound writes (packet computation) self.__write_lock = threading.RLock() @@ -113,9 +136,19 @@ def set_log(self, log): """ self.__logger = log - def set_outbound_cipher(self, block_engine, block_size, mac_engine, mac_size, mac_key, sdctr=False): + def set_outbound_cipher( + self, + block_engine, + block_size, + mac_engine, + mac_size, + mac_key, + sdctr=False, + etm=False, + ): """ Switch outbound data cipher. + :param etm: Set encrypt-then-mac from OpenSSH """ self.__block_engine_out = block_engine self.__sdctr_out = sdctr @@ -125,15 +158,26 @@ def set_outbound_cipher(self, block_engine, block_size, mac_engine, mac_size, ma self.__mac_key_out = mac_key self.__sent_bytes = 0 self.__sent_packets = 0 - # wait until the reset happens in both directions before clearing rekey flag + self.__etm_out = etm + # wait until the reset happens in both directions before clearing + # rekey flag self.__init_count |= 1 if self.__init_count == 3: self.__init_count = 0 self.__need_rekey = False - def set_inbound_cipher(self, block_engine, block_size, mac_engine, mac_size, mac_key): + def set_inbound_cipher( + self, + block_engine, + block_size, + mac_engine, + mac_size, + mac_key, + etm=False, + ): """ Switch inbound data cipher. + :param etm: Set encrypt-then-mac from OpenSSH """ self.__block_engine_in = block_engine self.__block_size_in = block_size @@ -144,7 +188,9 @@ def set_inbound_cipher(self, block_engine, block_size, mac_engine, mac_size, mac self.__received_packets = 0 self.__received_bytes_overflow = 0 self.__received_packets_overflow = 0 - # wait until the reset happens in both directions before clearing rekey flag + self.__etm_in = etm + # wait until the reset happens in both directions before clearing + # rekey flag self.__init_count |= 2 if self.__init_count == 3: self.__init_count = 0 @@ -237,8 +283,9 @@ def read_all(self, n, check_rekey=False): :param int n: number of bytes to read :return: the data read, as a `str` - :raises EOFError: - if the socket was closed before all the bytes could be read + :raises: + ``EOFError`` -- if the socket was closed before all the bytes could + be read """ out = bytes() # handle over-reading from reading the banner line @@ -262,9 +309,10 @@ def read_all(self, n, check_rekey=False): # on Linux, sometimes instead of socket.timeout, we get # EAGAIN. this is a bug in recent (> 2.6.9) kernels but # we need to work around it. 
- if (type(e.args) is tuple) and (len(e.args) > 0) and (e.args[0] == errno.EAGAIN): + arg = first_arg(e) + if arg == errno.EAGAIN: got_timeout = True - elif (type(e.args) is tuple) and (len(e.args) > 0) and (e.args[0] == errno.EINTR): + elif arg == errno.EINTR: # syscall interrupted; try again pass elif self.__closed: @@ -289,9 +337,10 @@ def write_all(self, out): except socket.timeout: retry_write = True except socket.error as e: - if (type(e.args) is tuple) and (len(e.args) > 0) and (e.args[0] == errno.EAGAIN): + arg = first_arg(e) + if arg == errno.EAGAIN: retry_write = True - elif (type(e.args) is tuple) and (len(e.args) > 0) and (e.args[0] == errno.EINTR): + elif arg == errno.EINTR: # syscall interrupted; try again retry_write = True else: @@ -307,11 +356,11 @@ def write_all(self, out): n = -1 else: if n == 0 and iteration_with_zero_as_return_value > 10: - # We shouldn't retry the write, but we didn't - # manage to send anything over the socket. This might be an - # indication that we have lost contact with the remote side, - # but are yet to receive an EOFError or other socket errors. - # Let's give it some iteration to try and catch up. + # We shouldn't retry the write, but we didn't + # manage to send anything over the socket. This might be an + # indication that we have lost contact with the remote + # side, but are yet to receive an EOFError or other socket + # errors. Let's give it some iteration to try and catch up. n = -1 iteration_with_zero_as_return_value += 1 if n < 0: @@ -327,10 +376,10 @@ def readline(self, timeout): line, so it's okay to attempt large reads. """ buf = self.__remainder - while not linefeed_byte in buf: + while linefeed_byte not in buf: buf += self._read_timeout(timeout) n = buf.index(linefeed_byte) - self.__remainder = buf[n + 1:] + self.__remainder = buf[n + 1 :] buf = buf[:n] if (len(buf) > 0) and (buf[-1] == cr_byte_value): buf = buf[:-1] @@ -346,7 +395,7 @@ def send_message(self, data): if cmd in MSG_NAMES: cmd_name = MSG_NAMES[cmd] else: - cmd_name = '$%x' % cmd + cmd_name = "${:x}".format(cmd) orig_len = len(data) self.__write_lock.acquire() try: @@ -354,26 +403,45 @@ def send_message(self, data): data = self.__compress_engine_out(data) packet = self._build_packet(data) if self.__dump_packets: - self._log(DEBUG, 'Write packet <%s>, length %d' % (cmd_name, orig_len)) - self._log(DEBUG, util.format_binary(packet, 'OUT: ')) + self._log( + DEBUG, + "Write packet <{}>, length {}".format(cmd_name, orig_len), + ) + self._log(DEBUG, util.format_binary(packet, "OUT: ")) if self.__block_engine_out is not None: - out = self.__block_engine_out.update(packet) + if self.__etm_out: + # packet length is not encrypted in EtM + out = packet[0:4] + self.__block_engine_out.update( + packet[4:] + ) + else: + out = self.__block_engine_out.update(packet) else: out = packet # + mac if self.__block_engine_out is not None: - payload = struct.pack('>I', self.__sequence_number_out) + packet - out += compute_hmac(self.__mac_key_out, payload, self.__mac_engine_out)[:self.__mac_size_out] - self.__sequence_number_out = (self.__sequence_number_out + 1) & xffffffff + packed = struct.pack(">I", self.__sequence_number_out) + payload = packed + (out if self.__etm_out else packet) + out += compute_hmac( + self.__mac_key_out, payload, self.__mac_engine_out + )[: self.__mac_size_out] + self.__sequence_number_out = ( + self.__sequence_number_out + 1 + ) & xffffffff self.write_all(out) self.__sent_bytes += len(out) self.__sent_packets += 1 - if (self.__sent_packets >= self.REKEY_PACKETS or 
self.__sent_bytes >= self.REKEY_BYTES)\ - and not self.__need_rekey: + sent_too_much = ( + self.__sent_packets >= self.REKEY_PACKETS + or self.__sent_bytes >= self.REKEY_BYTES + ) + if sent_too_much and not self.__need_rekey: # only ask once for rekeying - self._log(DEBUG, 'Rekeying (hit %d packets, %d bytes sent)' % - (self.__sent_packets, self.__sent_bytes)) + msg = "Rekeying (hit {} packets, {} bytes sent)" + self._log( + DEBUG, msg.format(self.__sent_packets, self.__sent_bytes) + ) self.__received_bytes_overflow = 0 self.__received_packets_overflow = 0 self._trigger_rekey() @@ -385,40 +453,78 @@ def read_message(self): Only one thread should ever be in this function (no other locking is done). - :raises SSHException: if the packet is mangled - :raises NeedRekeyException: if the transport should rekey + :raises: `.SSHException` -- if the packet is mangled + :raises: `.NeedRekeyException` -- if the transport should rekey """ header = self.read_all(self.__block_size_in, check_rekey=True) + if self.__etm_in: + packet_size = struct.unpack(">I", header[:4])[0] + remaining = packet_size - self.__block_size_in + 4 + packet = header[4:] + self.read_all(remaining, check_rekey=False) + mac = self.read_all(self.__mac_size_in, check_rekey=False) + mac_payload = ( + struct.pack(">II", self.__sequence_number_in, packet_size) + + packet + ) + my_mac = compute_hmac( + self.__mac_key_in, mac_payload, self.__mac_engine_in + )[: self.__mac_size_in] + if not util.constant_time_bytes_eq(my_mac, mac): + raise SSHException("Mismatched MAC") + header = packet + if self.__block_engine_in is not None: header = self.__block_engine_in.update(header) if self.__dump_packets: - self._log(DEBUG, util.format_binary(header, 'IN: ')) - packet_size = struct.unpack('>I', header[:4])[0] - # leftover contains decrypted bytes from the first block (after the - # length field) - leftover = header[4:] - if (packet_size - len(leftover)) % self.__block_size_in != 0: - raise SSHException('Invalid packet blocking') - buf = self.read_all(packet_size + self.__mac_size_in - len(leftover)) - packet = buf[:packet_size - len(leftover)] - post_packet = buf[packet_size - len(leftover):] - if self.__block_engine_in is not None: - packet = self.__block_engine_in.update(packet) - if self.__dump_packets: - self._log(DEBUG, util.format_binary(packet, 'IN: ')) - packet = leftover + packet + self._log(DEBUG, util.format_binary(header, "IN: ")) - if self.__mac_size_in > 0: - mac = post_packet[:self.__mac_size_in] - mac_payload = struct.pack('>II', self.__sequence_number_in, packet_size) + packet - my_mac = compute_hmac(self.__mac_key_in, mac_payload, self.__mac_engine_in)[:self.__mac_size_in] + # When ETM is in play, we've already read the packet size & decrypted + # everything, so just set the packet back to the header we obtained. 
+ if self.__etm_in: + packet = header + # Otherwise, use the older non-ETM logic + else: + packet_size = struct.unpack(">I", header[:4])[0] + + # leftover contains decrypted bytes from the first block (after the + # length field) + leftover = header[4:] + if (packet_size - len(leftover)) % self.__block_size_in != 0: + raise SSHException("Invalid packet blocking") + buf = self.read_all( + packet_size + self.__mac_size_in - len(leftover) + ) + packet = buf[: packet_size - len(leftover)] + post_packet = buf[packet_size - len(leftover) :] + + if self.__block_engine_in is not None: + packet = self.__block_engine_in.update(packet) + packet = leftover + packet + + if self.__dump_packets: + self._log(DEBUG, util.format_binary(packet, "IN: ")) + + if self.__mac_size_in > 0 and not self.__etm_in: + mac = post_packet[: self.__mac_size_in] + mac_payload = ( + struct.pack(">II", self.__sequence_number_in, packet_size) + + packet + ) + my_mac = compute_hmac( + self.__mac_key_in, mac_payload, self.__mac_engine_in + )[: self.__mac_size_in] if not util.constant_time_bytes_eq(my_mac, mac): - raise SSHException('Mismatched MAC') + raise SSHException("Mismatched MAC") padding = byte_ord(packet[0]) - payload = packet[1:packet_size - padding] + payload = packet[1 : packet_size - padding] if self.__dump_packets: - self._log(DEBUG, 'Got payload (%d bytes, %d padding)' % (packet_size, padding)) + self._log( + DEBUG, + "Got payload ({} bytes, {} padding)".format( + packet_size, padding + ), + ) if self.__compress_engine_in is not None: payload = self.__compress_engine_in(payload) @@ -436,14 +542,24 @@ def read_message(self): # dropping the connection self.__received_bytes_overflow += raw_packet_size self.__received_packets_overflow += 1 - if (self.__received_packets_overflow >= self.REKEY_PACKETS_OVERFLOW_MAX) or \ - (self.__received_bytes_overflow >= self.REKEY_BYTES_OVERFLOW_MAX): - raise SSHException('Remote transport is ignoring rekey requests') - elif (self.__received_packets >= self.REKEY_PACKETS) or \ - (self.__received_bytes >= self.REKEY_BYTES): + if ( + self.__received_packets_overflow + >= self.REKEY_PACKETS_OVERFLOW_MAX + ) or ( + self.__received_bytes_overflow >= self.REKEY_BYTES_OVERFLOW_MAX + ): + raise SSHException( + "Remote transport is ignoring rekey requests" + ) + elif (self.__received_packets >= self.REKEY_PACKETS) or ( + self.__received_bytes >= self.REKEY_BYTES + ): # only ask once for rekeying - self._log(DEBUG, 'Rekeying (hit %d packets, %d bytes received)' % - (self.__received_packets, self.__received_bytes)) + err = "Rekeying (hit {} packets, {} bytes received)" + self._log( + DEBUG, + err.format(self.__received_packets, self.__received_bytes), + ) self.__received_bytes_overflow = 0 self.__received_packets_overflow = 0 self._trigger_rekey() @@ -452,12 +568,15 @@ def read_message(self): if cmd in MSG_NAMES: cmd_name = MSG_NAMES[cmd] else: - cmd_name = '$%x' % cmd + cmd_name = "${:x}".format(cmd) if self.__dump_packets: - self._log(DEBUG, 'Read packet <%s>, length %d' % (cmd_name, len(payload))) + self._log( + DEBUG, + "Read packet <{}>, length {}".format(cmd_name, len(payload)), + ) return cmd, msg - ########## protected + # ...protected... 
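# ---------------------------------------------------------------------------
# [Editorial sketch; illustration only, not part of this diff.] The two MAC
# layouts that send_message()/read_message() juggle above, modelled with
# hmac/struct only. `enc` stands in for the negotiated cipher's encrypt call
# and `mac_key` for the negotiated MAC key; both names are hypothetical.
import hmac
import struct
from hashlib import sha256

def classic_frame(seq, packet, enc, mac_key):
    # MAC-then-encrypt: MAC the *plaintext* packet (sequence number
    # prepended, length field included), then encrypt the whole packet,
    # length and all.
    mac = hmac.new(mac_key, struct.pack(">I", seq) + packet, sha256).digest()
    return enc(packet) + mac

def etm_frame(seq, packet, enc, mac_key):
    # Encrypt-then-MAC: the 4-byte length stays plaintext, the rest is
    # encrypted, and the MAC covers seq + length + ciphertext -- which is
    # why read_message() can verify the MAC before decrypting anything.
    wire = packet[:4] + enc(packet[4:])
    mac = hmac.new(mac_key, struct.pack(">I", seq) + wire, sha256).digest()
    return wire + mac
# ---------------------------------------------------------------------------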
def _log(self, level, msg): if self.__logger is None: @@ -469,8 +588,11 @@ def _log(self, level, msg): self.__logger.log(level, msg) def _check_keepalive(self): - if (not self.__keepalive_interval) or (not self.__block_engine_out) or \ - self.__need_rekey: + if ( + not self.__keepalive_interval + or not self.__block_engine_out + or self.__need_rekey + ): # wait till we're encrypting, and not in the middle of rekeying return now = time.time() @@ -489,8 +611,7 @@ def _read_timeout(self, timeout): except socket.timeout: pass except EnvironmentError as e: - if (type(e.args) is tuple and len(e.args) > 0 and - e.args[0] == errno.EINTR): + if first_arg(e) == errno.EINTR: pass else: raise @@ -504,13 +625,17 @@ def _read_timeout(self, timeout): def _build_packet(self, payload): # pad up at least 4 bytes, to nearest block-size (usually 8) bsize = self.__block_size_out - padding = 3 + bsize - ((len(payload) + 8) % bsize) - packet = struct.pack('>IB', len(payload) + padding + 1, padding) + # do not include payload length in computations for padding in EtM mode + # (payload length won't be encrypted) + addlen = 4 if self.__etm_out else 8 + padding = 3 + bsize - ((len(payload) + addlen) % bsize) + packet = struct.pack(">IB", len(payload) + padding + 1, padding) packet += payload if self.__sdctr_out or self.__block_engine_out is None: - # cute trick i caught openssh doing: if we're not encrypting or SDCTR mode (RFC4344), + # cute trick i caught openssh doing: if we're not encrypting or + # SDCTR mode (RFC4344), # don't waste random bytes for the padding - packet += (zero_byte * padding) + packet += zero_byte * padding else: packet += os.urandom(padding) return packet diff --git a/paramiko/pipe.py b/paramiko/pipe.py index 4f62d7c59..dda885da2 100644 --- a/paramiko/pipe.py +++ b/paramiko/pipe.py @@ -28,30 +28,29 @@ import sys import os import socket -from paramiko.py3compat import b def make_pipe(): - if sys.platform[:3] != 'win': + if sys.platform[:3] != "win": p = PosixPipe() else: p = WindowsPipe() return p -class PosixPipe (object): +class PosixPipe(object): def __init__(self): self._rfd, self._wfd = os.pipe() self._set = False self._forever = False self._closed = False - + def close(self): os.close(self._rfd) os.close(self._wfd) # used for unit tests: self._closed = True - + def fileno(self): return self._rfd @@ -60,75 +59,76 @@ def clear(self): return os.read(self._rfd, 1) self._set = False - + def set(self): if self._set or self._closed: return self._set = True - os.write(self._wfd, b'*') - + os.write(self._wfd, b"*") + def set_forever(self): self._forever = True self.set() -class WindowsPipe (object): +class WindowsPipe(object): """ On Windows, only an OS-level "WinSock" may be used in select(), but reads and writes must be to the actual socket object. 
""" + def __init__(self): serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - serv.bind(('127.0.0.1', 0)) + serv.bind(("127.0.0.1", 0)) serv.listen(1) - + # need to save sockets in _rsock/_wsock so they don't get closed self._rsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self._rsock.connect(('127.0.0.1', serv.getsockname()[1])) - + self._rsock.connect(("127.0.0.1", serv.getsockname()[1])) + self._wsock, addr = serv.accept() serv.close() self._set = False self._forever = False self._closed = False - + def close(self): self._rsock.close() self._wsock.close() # used for unit tests: self._closed = True - + def fileno(self): return self._rsock.fileno() - def clear (self): + def clear(self): if not self._set or self._forever: return self._rsock.recv(1) self._set = False - - def set (self): + + def set(self): if self._set or self._closed: return self._set = True - self._wsock.send(b'*') + self._wsock.send(b"*") - def set_forever (self): + def set_forever(self): self._forever = True self.set() -class OrPipe (object): +class OrPipe(object): def __init__(self, pipe): self._set = False self._partner = None self._pipe = pipe - + def set(self): self._set = True if not self._partner._set: self._pipe.set() - + def clear(self): self._set = False if not self._partner._set: @@ -146,4 +146,3 @@ def make_or_pipe(pipe): p1._partner = p2 p2._partner = p1 return p1, p2 - diff --git a/paramiko/pkey.py b/paramiko/pkey.py index 0637a6f08..67945261d 100644 --- a/paramiko/pkey.py +++ b/paramiko/pkey.py @@ -24,6 +24,11 @@ from binascii import unhexlify import os from hashlib import md5 +import re +import struct + +import six +import bcrypt from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization @@ -31,8 +36,27 @@ from paramiko import util from paramiko.common import o600 -from paramiko.py3compat import u, encodebytes, decodebytes, b +from paramiko.py3compat import u, b, encodebytes, decodebytes, string_types from paramiko.ssh_exception import SSHException, PasswordRequiredException +from paramiko.message import Message + + +OPENSSH_AUTH_MAGIC = b"openssh-key-v1\x00" + + +def _unpad_openssh(data): + # At the moment, this is only used for unpadding private keys on disk. This + # really ought to be made constant time (possibly by upstreaming this logic + # into pyca/cryptography). 
+ padding_length = six.indexbytes(data, -1) + if 0x20 <= padding_length < 0x7f: + return data # no padding, last byte part comment (printable ascii) + if padding_length > 15: + raise SSHException("Invalid key") + for i in range(padding_length): + if six.indexbytes(data, i - padding_length) != i + 1: + raise SSHException("Invalid key") + return data[:-padding_length] class PKey(object): @@ -42,19 +66,31 @@ class PKey(object): # known encryption types for private key files: _CIPHER_TABLE = { - 'AES-128-CBC': { - 'cipher': algorithms.AES, - 'keysize': 16, - 'blocksize': 16, - 'mode': modes.CBC + "AES-128-CBC": { + "cipher": algorithms.AES, + "keysize": 16, + "blocksize": 16, + "mode": modes.CBC, + }, + "AES-256-CBC": { + "cipher": algorithms.AES, + "keysize": 32, + "blocksize": 16, + "mode": modes.CBC, }, - 'DES-EDE3-CBC': { - 'cipher': algorithms.TripleDES, - 'keysize': 24, - 'blocksize': 8, - 'mode': modes.CBC + "DES-EDE3-CBC": { + "cipher": algorithms.TripleDES, + "keysize": 24, + "blocksize": 8, + "mode": modes.CBC, }, } + _PRIVATE_KEY_FORMAT_ORIGINAL = 1 + _PRIVATE_KEY_FORMAT_OPENSSH = 2 + BEGIN_TAG = re.compile( + r"^-{5}BEGIN (RSA|DSA|EC|OPENSSH) PRIVATE KEY-{5}\s*$" + ) + END_TAG = re.compile(r"^-{5}END (RSA|DSA|EC|OPENSSH) PRIVATE KEY-{5}\s*$") def __init__(self, msg=None, data=None): """ @@ -65,9 +101,10 @@ def __init__(self, msg=None, data=None): :param .Message msg: an optional SSH `.Message` containing a public key of this type. - :param str data: an optional string containing a public key of this type + :param str data: an optional string containing a public key + of this type - :raises SSHException: + :raises: `.SSHException` -- if a key cannot be created from the ``data`` or ``msg`` given, or no key was passed in. """ @@ -85,6 +122,8 @@ def __str__(self): return self.asbytes() # noinspection PyUnresolvedReferences + # TODO: The comparison functions should be removed as per: + # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons def __cmp__(self, other): """ Compare this key to another. Returns 0 if this key is equivalent to @@ -92,16 +131,23 @@ def __cmp__(self, other): of the key are compared, so a public key will compare equal to its corresponding private key. - :param .Pkey other: key to compare to. + :param .PKey other: key to compare to. """ hs = hash(self) ho = hash(other) if hs != ho: - return cmp(hs, ho) - return cmp(self.asbytes(), other.asbytes()) + return cmp(hs, ho) # noqa + return cmp(self.asbytes(), other.asbytes()) # noqa def __eq__(self, other): - return hash(self) == hash(other) + return self._fields == other._fields + + def __hash__(self): + return hash(self._fields) + + @property + def _fields(self): + raise NotImplementedError def get_name(self): """ @@ -111,7 +157,7 @@ def get_name(self): name of this private key type, in SSH terminology, as a `str` (for example, ``"ssh-rsa"``). """ - return '' + return "" def get_bits(self): """ @@ -148,15 +194,22 @@ def get_base64(self): :return: a base64 `string ` containing the public part of the key. """ - return u(encodebytes(self.asbytes())).replace('\n', '') + return u(encodebytes(self.asbytes())).replace("\n", "") - def sign_ssh_data(self, data): + def sign_ssh_data(self, data, algorithm=None): """ Sign a blob of data with this private key, and return a `.Message` representing an SSH signature message. - :param str data: the data to sign. + :param str data: + the data to sign. + :param str algorithm: + the signature algorithm to use, if different from the key's + internal name. Default: ``None``. 
:return: an SSH signature `message <.Message>`. + + .. versionchanged:: 2.9 + Added the ``algorithm`` kwarg. """ return bytes() @@ -188,10 +241,10 @@ def from_private_key_file(cls, filename, password=None): encrypted :return: a new `.PKey` based on the given private key - :raises IOError: if there was an error reading the file - :raises PasswordRequiredException: if the private key file is + :raises: ``IOError`` -- if there was an error reading the file + :raises: `.PasswordRequiredException` -- if the private key file is encrypted, and ``password`` is ``None`` - :raises SSHException: if the key file is invalid + :raises: `.SSHException` -- if the key file is invalid """ key = cls(filename=filename, password=password) return key @@ -209,10 +262,10 @@ def from_private_key(cls, file_obj, password=None): an optional password to use to decrypt the key, if it's encrypted :return: a new `.PKey` based on the given private key - :raises IOError: if there was an error reading the key - :raises PasswordRequiredException: + :raises: ``IOError`` -- if there was an error reading the key + :raises: `.PasswordRequiredException` -- if the private key file is encrypted, and ``password`` is ``None`` - :raises SSHException: if the key file is invalid + :raises: `.SSHException` -- if the key file is invalid """ key = cls(file_obj=file_obj, password=password) return key @@ -226,10 +279,10 @@ def write_private_key_file(self, filename, password=None): :param str password: an optional password to use to encrypt the key file - :raises IOError: if there was an error writing the file - :raises SSHException: if the key is invalid + :raises: ``IOError`` -- if there was an error writing the file + :raises: `.SSHException` -- if the key is invalid """ - raise Exception('Not implemented in PKey') + raise Exception("Not implemented in PKey") def write_private_key(self, file_obj, password=None): """ @@ -239,78 +292,115 @@ def write_private_key(self, file_obj, password=None): :param file_obj: the file-like object to write into :param str password: an optional password to use to encrypt the key - :raises IOError: if there was an error writing to the file - :raises SSHException: if the key is invalid + :raises: ``IOError`` -- if there was an error writing to the file + :raises: `.SSHException` -- if the key is invalid """ - raise Exception('Not implemented in PKey') + raise Exception("Not implemented in PKey") def _read_private_key_file(self, tag, filename, password=None): """ Read an SSH2-format private key file, looking for a string of the type ``"BEGIN xxx PRIVATE KEY"`` for some ``xxx``, base64-decode the text we find, and return it as a string. If the private key is encrypted and - ``password`` is not ``None``, the given password will be used to decrypt - the key (otherwise `.PasswordRequiredException` is thrown). + ``password`` is not ``None``, the given password will be used to + decrypt the key (otherwise `.PasswordRequiredException` is thrown). - :param str tag: ``"RSA"`` or ``"DSA"``, the tag used to mark the data block. + :param str tag: ``"RSA"`` or ``"DSA"``, the tag used to mark the + data block. :param str filename: name of the file to read. :param str password: an optional password to use to decrypt the key file, if it's encrypted. :return: data blob (`str`) that makes up the private key. - :raises IOError: if there was an error reading the file. - :raises PasswordRequiredException: if the private key file is + :raises: ``IOError`` -- if there was an error reading the file. 
+ :raises: `.PasswordRequiredException` -- if the private key file is encrypted, and ``password`` is ``None``. - :raises SSHException: if the key file is invalid. + :raises: `.SSHException` -- if the key file is invalid. """ - with open(filename, 'r') as f: + with open(filename, "r") as f: data = self._read_private_key(tag, f, password) return data def _read_private_key(self, tag, f, password=None): lines = f.readlines() + + # find the BEGIN tag start = 0 - while (start < len(lines)) and (lines[start].strip() != '-----BEGIN ' + tag + ' PRIVATE KEY-----'): + m = self.BEGIN_TAG.match(lines[start]) + line_range = len(lines) - 1 + while start < line_range and not m: start += 1 - if start >= len(lines): - raise SSHException('not a valid ' + tag + ' private key file') + m = self.BEGIN_TAG.match(lines[start]) + start += 1 + keytype = m.group(1) if m else None + if start >= len(lines) or keytype is None: + raise SSHException("not a valid {} private key file".format(tag)) + + # find the END tag + end = start + m = self.END_TAG.match(lines[end]) + while end < line_range and not m: + end += 1 + m = self.END_TAG.match(lines[end]) + + if keytype == tag: + data = self._read_private_key_pem(lines, end, password) + pkformat = self._PRIVATE_KEY_FORMAT_ORIGINAL + elif keytype == "OPENSSH": + data = self._read_private_key_openssh(lines[start:end], password) + pkformat = self._PRIVATE_KEY_FORMAT_OPENSSH + else: + raise SSHException( + "encountered {} key, expected {} key".format(keytype, tag) + ) + + return pkformat, data + + def _got_bad_key_format_id(self, id_): + err = "{}._read_private_key() spat out an unknown key format id '{}'" + raise SSHException(err.format(self.__class__.__name__, id_)) + + def _read_private_key_pem(self, lines, end, password): + start = 0 # parse any headers first headers = {} start += 1 while start < len(lines): - l = lines[start].split(': ') - if len(l) == 1: + line = lines[start].split(": ") + if len(line) == 1: break - headers[l[0].lower()] = l[1].strip() + headers[line[0].lower()] = line[1].strip() start += 1 - # find end - end = start - while end < len(lines) and lines[end].strip() != '-----END ' + tag + ' PRIVATE KEY-----': - end += 1 # if we trudged to the end of the file, just try to cope. 
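# ---------------------------------------------------------------------------
# [Editorial sketch; illustration only, not part of this diff.] The
# 'proc-type'/'dek-info' headers collected above belong to the traditional
# encrypted-PEM layout, roughly:
#
#     -----BEGIN RSA PRIVATE KEY-----
#     Proc-Type: 4,ENCRYPTED
#     DEK-Info: AES-128-CBC,<hex-encoded IV/salt>
#
#     <base64 body>
#     -----END RSA PRIVATE KEY-----
#
# and the decryption key derived further down via util.generate_key_bytes()
# mirrors OpenSSL's EVP_BytesToKey with MD5, a single round and an 8-byte
# salt. A minimal standalone equivalent (the function name is hypothetical;
# password and salt are bytes):
from hashlib import md5

def _evp_bytes_to_key(password, salt, keysize):
    salt = salt[:8]
    digest, key = b"", b""
    while len(key) < keysize:
        digest = md5(digest + password + salt).digest()
        key += digest
    return key[:keysize]
# ---------------------------------------------------------------------------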
try: - data = decodebytes(b(''.join(lines[start:end]))) + data = decodebytes(b("".join(lines[start:end]))) except base64.binascii.Error as e: - raise SSHException('base64 decoding error: ' + str(e)) - if 'proc-type' not in headers: + raise SSHException("base64 decoding error: {}".format(e)) + if "proc-type" not in headers: # unencryped: done return data # encrypted keyfile: will need a password - if headers['proc-type'] != '4,ENCRYPTED': - raise SSHException('Unknown private key structure "%s"' % headers['proc-type']) + proc_type = headers["proc-type"] + if proc_type != "4,ENCRYPTED": + raise SSHException( + 'Unknown private key structure "{}"'.format(proc_type) + ) try: - encryption_type, saltstr = headers['dek-info'].split(',') + encryption_type, saltstr = headers["dek-info"].split(",") except: raise SSHException("Can't parse DEK-info in private key file") if encryption_type not in self._CIPHER_TABLE: - raise SSHException('Unknown private key cipher "%s"' % encryption_type) - # if no password was passed in, raise an exception pointing out that we need one + raise SSHException( + 'Unknown private key cipher "{}"'.format(encryption_type) + ) + # if no password was passed in, + # raise an exception pointing out that we need one if password is None: - raise PasswordRequiredException('Private key file is encrypted') - cipher = self._CIPHER_TABLE[encryption_type]['cipher'] - keysize = self._CIPHER_TABLE[encryption_type]['keysize'] - mode = self._CIPHER_TABLE[encryption_type]['mode'] + raise PasswordRequiredException("Private key file is encrypted") + cipher = self._CIPHER_TABLE[encryption_type]["cipher"] + keysize = self._CIPHER_TABLE[encryption_type]["keysize"] + mode = self._CIPHER_TABLE[encryption_type]["mode"] salt = unhexlify(b(saltstr)) key = util.generate_key_bytes(md5, salt, password, keysize) decryptor = Cipher( @@ -318,6 +408,141 @@ def _read_private_key(self, tag, f, password=None): ).decryptor() return decryptor.update(data) + decryptor.finalize() + def _read_private_key_openssh(self, lines, password): + """ + Read the new OpenSSH SSH2 private key format available + since OpenSSH version 6.5 + Reference: + https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.key + """ + try: + data = decodebytes(b("".join(lines))) + except base64.binascii.Error as e: + raise SSHException("base64 decoding error: {}".format(e)) + + # read data struct + auth_magic = data[:15] + if auth_magic != OPENSSH_AUTH_MAGIC: + raise SSHException("unexpected OpenSSH key header encountered") + + cstruct = self._uint32_cstruct_unpack(data[15:], "sssur") + cipher, kdfname, kdf_options, num_pubkeys, remainder = cstruct + # For now, just support 1 key. + if num_pubkeys > 1: + raise SSHException( + "unsupported: private keyfile has multiple keys" + ) + pubkey, privkey_blob = self._uint32_cstruct_unpack(remainder, "ss") + + if kdfname == b("bcrypt"): + if cipher == b("aes256-cbc"): + mode = modes.CBC + elif cipher == b("aes256-ctr"): + mode = modes.CTR + else: + raise SSHException( + "unknown cipher `{}` used in private key file".format( + cipher.decode("utf-8") + ) + ) + # Encrypted private key. 
+ # If no password was passed in, raise an exception pointing + # out that we need one + if password is None: + raise PasswordRequiredException( + "private key file is encrypted" + ) + + # Unpack salt and rounds from kdfoptions + salt, rounds = self._uint32_cstruct_unpack(kdf_options, "su") + + # run bcrypt kdf to derive key and iv/nonce (32 + 16 bytes) + key_iv = bcrypt.kdf( + b(password), + b(salt), + 48, + rounds, + # We can't control how many rounds are on disk, so no sense + # warning about it. + ignore_few_rounds=True, + ) + key = key_iv[:32] + iv = key_iv[32:] + + # decrypt private key blob + decryptor = Cipher( + algorithms.AES(key), mode(iv), default_backend() + ).decryptor() + decrypted_privkey = decryptor.update(privkey_blob) + decrypted_privkey += decryptor.finalize() + elif cipher == b("none") and kdfname == b("none"): + # Unencrypted private key + decrypted_privkey = privkey_blob + else: + raise SSHException( + "unknown cipher or kdf used in private key file" + ) + + # Unpack private key and verify checkints + cstruct = self._uint32_cstruct_unpack(decrypted_privkey, "uusr") + checkint1, checkint2, keytype, keydata = cstruct + + if checkint1 != checkint2: + raise SSHException( + "OpenSSH private key file checkints do not match" + ) + + return _unpad_openssh(keydata) + + def _uint32_cstruct_unpack(self, data, strformat): + """ + Used to read new OpenSSH private key format. + Unpacks a c data structure containing a mix of 32-bit uints and + variable length strings prefixed by 32-bit uint size field, + according to the specified format. Returns the unpacked vars + in a tuple. + Format strings: + s - denotes a string + i - denotes a long integer, encoded as a byte string + u - denotes a 32-bit unsigned integer + r - the remainder of the input string, returned as a string + """ + arr = [] + idx = 0 + try: + for f in strformat: + if f == "s": + # string + s_size = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + s = data[idx : idx + s_size] + idx += s_size + arr.append(s) + if f == "i": + # long integer + s_size = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + s = data[idx : idx + s_size] + idx += s_size + i = util.inflate_long(s, True) + arr.append(i) + elif f == "u": + # 32-bit unsigned int + u = struct.unpack(">L", data[idx : idx + 4])[0] + idx += 4 + arr.append(u) + elif f == "r": + # remainder as string + s = data[idx:] + arr.append(s) + break + except Exception as e: + # PKey-consuming code frequently wants to save-and-skip-over issues + # with loading keys, and uses SSHException as the (really friggin + # awful) signal for this. So for now...we do this. + raise SSHException(str(e)) + return tuple(arr) + def _write_private_key_file(self, filename, key, format, password=None): """ Write an SSH2-format private key file in a form that can be read by @@ -331,21 +556,201 @@ def _write_private_key_file(self, filename, key, format, password=None): :param str data: data blob that makes up the private key. :param str password: an optional password to use to encrypt the file. - :raises IOError: if there was an error writing the file. - """ - with open(filename, 'w', o600) as f: - # grrr... the mode doesn't always take hold + :raises: ``IOError`` -- if there was an error writing the file. + """ + # Ensure that we create new key files directly with a user-only mode, + # instead of opening, writing, then chmodding, which leaves us open to + # CVE-2022-24302. 
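# ---------------------------------------------------------------------------
# [Editorial sketch; illustration only, not part of this diff.] The open
# pattern used just below, reduced to a standalone helper (its name is
# hypothetical): the 0600 mode is applied by the same syscall that creates
# the file, so a fresh key file is never world-readable, even briefly. For a
# pre-existing file the mode argument has no effect (it is not a chmod).
import os

def _open_user_only(path):
    fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    return os.fdopen(fd, "w")  # hand back a normal text-mode file object
# ---------------------------------------------------------------------------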
+ # NOTE: O_TRUNC is a noop on new files, and O_CREAT is a noop on + # existing files, so using all 3 in both cases is fine. Ditto the use + # of the 'mode' argument; it should be safe to give even for existing + # files (though it will not act like a chmod in that case). + kwargs = dict(flags=os.O_WRONLY | os.O_TRUNC | os.O_CREAT, mode=o600) + # NOTE: yea, you still gotta inform the FLO that it is in "write" mode + with os.fdopen(os.open(filename, **kwargs), mode="w") as f: + # TODO 3.0: remove the now redundant chmod os.chmod(filename, o600) - self._write_private_key(f, key, format) + self._write_private_key(f, key, format, password=password) def _write_private_key(self, f, key, format, password=None): if password is None: encryption = serialization.NoEncryption() else: - encryption = serialization.BestEncryption(password) + encryption = serialization.BestAvailableEncryption(b(password)) + + f.write( + key.private_bytes( + serialization.Encoding.PEM, format, encryption + ).decode() + ) + + def _check_type_and_load_cert(self, msg, key_type, cert_type): + """ + Perform message type-checking & optional certificate loading. + + This includes fast-forwarding cert ``msg`` objects past the nonce, so + that the subsequent fields are the key numbers; thus the caller may + expect to treat the message as key material afterwards either way. + + The obtained key type is returned for classes which need to know what + it was (e.g. ECDSA.) + """ + # Normalization; most classes have a single key type and give a string, + # but eg ECDSA is a 1:N mapping. + key_types = key_type + cert_types = cert_type + if isinstance(key_type, string_types): + key_types = [key_types] + if isinstance(cert_types, string_types): + cert_types = [cert_types] + # Can't do much with no message, that should've been handled elsewhere + if msg is None: + raise SSHException("Key object may not be empty") + # First field is always key type, in either kind of object. (make sure + # we rewind before grabbing it - sometimes caller had to do their own + # introspection first!) + msg.rewind() + type_ = msg.get_text() + # Regular public key - nothing special to do besides the implicit + # type check. + if type_ in key_types: + pass + # OpenSSH-compatible certificate - store full copy as .public_blob + # (so signing works correctly) and then fast-forward past the + # nonce. + elif type_ in cert_types: + # This seems the cleanest way to 'clone' an already-being-read + # message; they're *IO objects at heart and their .getvalue() + # always returns the full value regardless of pointer position. + self.load_certificate(Message(msg.asbytes())) + # Read out nonce as it comes before the public numbers. + # TODO: usefully interpret it & other non-public-number fields + # (requires going back into per-type subclasses.) + msg.get_string() + else: + err = "Invalid key (class: {}, data type: {}" + raise SSHException(err.format(self.__class__.__name__, type_)) + + def load_certificate(self, value): + """ + Supplement the private key contents with data loaded from an OpenSSH + public key (``.pub``) or certificate (``-cert.pub``) file, a string + containing such a file, or a `.Message` object. + + The .pub contents adds no real value, since the private key + file includes sufficient information to derive the public + key info. For certificates, however, this can be used on + the client side to offer authentication requests to the server + based on certificate instead of raw public key. 
+ + See: + https://github.com/openssh/openssh-portable/blob/master/PROTOCOL.certkeys + + Note: very little effort is made to validate the certificate contents, + that is for the server to decide if it is good enough to authenticate + successfully. + """ + if isinstance(value, Message): + constructor = "from_message" + elif os.path.isfile(value): + constructor = "from_file" + else: + constructor = "from_string" + blob = getattr(PublicBlob, constructor)(value) + if not blob.key_type.startswith(self.get_name()): + err = "PublicBlob type {} incompatible with key type {}" + raise ValueError(err.format(blob.key_type, self.get_name())) + self.public_blob = blob + + +# General construct for an OpenSSH style Public Key blob +# readable from a one-line file of the format: +# [] +# Of little value in the case of standard public keys +# {ssh-rsa, ssh-dss, ssh-ecdsa, ssh-ed25519}, but should +# provide rudimentary support for {*-cert.v01} +class PublicBlob(object): + """ + OpenSSH plain public key or OpenSSH signed public key (certificate). + + Tries to be as dumb as possible and barely cares about specific + per-key-type data. + + .. note:: + + Most of the time you'll want to call `from_file`, `from_string` or + `from_message` for useful instantiation, the main constructor is + basically "I should be using ``attrs`` for this." + """ + + def __init__(self, type_, blob, comment=None): + """ + Create a new public blob of given type and contents. + + :param str type_: Type indicator, eg ``ssh-rsa``. + :param blob: The blob bytes themselves. + :param str comment: A comment, if one was given (e.g. file-based.) + """ + self.key_type = type_ + self.key_blob = blob + self.comment = comment + + @classmethod + def from_file(cls, filename): + """ + Create a public blob from a ``-cert.pub``-style file on disk. + """ + with open(filename) as f: + string = f.read() + return cls.from_string(string) + + @classmethod + def from_string(cls, string): + """ + Create a public blob from a ``-cert.pub``-style string. + """ + fields = string.split(None, 2) + if len(fields) < 2: + msg = "Not enough fields for public blob: {}" + raise ValueError(msg.format(fields)) + key_type = fields[0] + key_blob = decodebytes(b(fields[1])) + try: + comment = fields[2].strip() + except IndexError: + comment = None + # Verify that the blob message first (string) field matches the + # key_type + m = Message(key_blob) + blob_type = m.get_text() + if blob_type != key_type: + deets = "key type={!r}, but blob type={!r}".format( + key_type, blob_type + ) + raise ValueError("Invalid PublicBlob contents: {}".format(deets)) + # All good? All good. + return cls(type_=key_type, blob=key_blob, comment=comment) + + @classmethod + def from_message(cls, message): + """ + Create a public blob from a network `.Message`. + + Specifically, a cert-bearing pubkey auth packet, because by definition + OpenSSH-style certificates 'are' their own network representation." + """ + type_ = message.get_text() + return cls(type_=type_, blob=message.asbytes()) + + def __str__(self): + ret = "{} public key/certificate".format(self.key_type) + if self.comment: + ret += "- {}".format(self.comment) + return ret + + def __eq__(self, other): + # Just piggyback on Message/BytesIO, since both of these should be one. 
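# ---------------------------------------------------------------------------
# [Editorial sketch; illustration only, not part of this diff.] The one-line
# format that from_string()/from_file() expect is "<type> <base64 blob>
# [comment]", and the blob's own leading string field must repeat the type.
# A tiny round-trip, assuming a paramiko build containing this change is
# importable (the "key" here is just a type string, enough for the check):
import base64
from paramiko.message import Message
from paramiko.pkey import PublicBlob

m = Message()
m.add_string("ssh-rsa")  # a real key would append its public numbers here
line = "ssh-rsa {} demo@host".format(base64.b64encode(m.asbytes()).decode())
blob = PublicBlob.from_string(line)
assert blob.key_type == "ssh-rsa" and blob.comment == "demo@host"
# ---------------------------------------------------------------------------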
+ return self and other and self.key_blob == other.key_blob - f.write(key.private_bytes( - serialization.Encoding.PEM, - format, - encryption - ).decode()) + def __ne__(self, other): + return not self == other diff --git a/paramiko/primes.py b/paramiko/primes.py index d0e175757..8dff7683a 100644 --- a/paramiko/primes.py +++ b/paramiko/primes.py @@ -25,7 +25,6 @@ from paramiko import util from paramiko.py3compat import byte_mask, long from paramiko.ssh_exception import SSHException -from paramiko.common import * def _roll_random(n): @@ -50,7 +49,7 @@ def _roll_random(n): return num -class ModulusPack (object): +class ModulusPack(object): """ convenience object for holding the contents of the /etc/ssh/moduli file, on systems that have such a file. @@ -62,7 +61,15 @@ def __init__(self): self.discarded = [] def _parse_modulus(self, line): - timestamp, mod_type, tests, tries, size, generator, modulus = line.split() + ( + timestamp, + mod_type, + tests, + tries, + size, + generator, + modulus, + ) = line.split() mod_type = int(mod_type) tests = int(tests) tries = int(tries) @@ -74,8 +81,14 @@ def _parse_modulus(self, line): # type 2 (meets basic structural requirements) # test 4 (more than just a small-prime sieve) # tries < 100 if test & 4 (at least 100 tries of miller-rabin) - if (mod_type < 2) or (tests < 4) or ((tests & 4) and (tests < 8) and (tries < 100)): - self.discarded.append((modulus, 'does not meet basic requirements')) + if ( + mod_type < 2 + or tests < 4 + or (tests & 4 and tests < 8 and tries < 100) + ): + self.discarded.append( + (modulus, "does not meet basic requirements") + ) return if generator == 0: generator = 2 @@ -85,7 +98,9 @@ def _parse_modulus(self, line): # this is okay. bl = util.bit_length(modulus) if (bl != size) and (bl != size + 1): - self.discarded.append((modulus, 'incorrectly reported bit length %d' % size)) + self.discarded.append( + (modulus, "incorrectly reported bit length {}".format(size)) + ) return if bl not in self.pack: self.pack[bl] = [] @@ -96,10 +111,10 @@ def read_file(self, filename): :raises IOError: passed from any file operations that fail. """ self.pack = {} - with open(filename, 'r') as f: + with open(filename, "r") as f: for line in f: line = line.strip() - if (len(line) == 0) or (line[0] == '#'): + if (len(line) == 0) or (line[0] == "#"): continue try: self._parse_modulus(line) @@ -109,7 +124,7 @@ def read_file(self, filename): def get_modulus(self, min, prefer, max): bitsizes = sorted(self.pack.keys()) if len(bitsizes) == 0: - raise SSHException('no moduli available') + raise SSHException("no moduli available") good = -1 # find nearest bitsize >= preferred for b in bitsizes: diff --git a/paramiko/proxy.py b/paramiko/proxy.py index d3ae436f5..077e8e352 100644 --- a/paramiko/proxy.py +++ b/paramiko/proxy.py @@ -17,15 +17,21 @@ # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. -from datetime import datetime import os -from shlex import split as shlsplit +import shlex import signal -from subprocess import Popen, PIPE from select import select import socket import time +# Try-and-ignore import so platforms w/o subprocess (eg Google App Engine) can +# still import paramiko. +subprocess, subprocess_import_error = None, None +try: + import subprocess +except ImportError as e: + subprocess_import_error = e + from paramiko.ssh_exception import ProxyCommandFailure from paramiko.util import ClosingContextManager @@ -41,6 +47,7 @@ class ProxyCommand(ClosingContextManager): Instances of this class may be used as context managers. 
""" + def __init__(self, command_line): """ Create a new CommandProxy instance. The instance created by this @@ -49,9 +56,16 @@ class can be passed as an argument to the `.Transport` class. :param str command_line: the command that should be executed and used as the proxy. """ - self.cmd = shlsplit(command_line) - self.process = Popen(self.cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, - bufsize=0) + if subprocess is None: + raise subprocess_import_error + self.cmd = shlex.split(command_line) + self.process = subprocess.Popen( + self.cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=0, + ) self.timeout = None def send(self, content): @@ -68,7 +82,7 @@ def send(self, content): # died and we can't proceed. The best option here is to # raise an exception informing the user that the informed # ProxyCommand is not working. - raise ProxyCommandFailure(' '.join(self.cmd), e.strerror) + raise ProxyCommandFailure(" ".join(self.cmd), e.strerror) return len(content) def recv(self, size): @@ -80,21 +94,21 @@ def recv(self, size): :return: the string of bytes read, which may be shorter than requested """ try: - buffer = b'' + buffer = b"" start = time.time() while len(buffer) < size: select_timeout = None if self.timeout is not None: - elapsed = (time.time() - start) + elapsed = time.time() - start if elapsed >= self.timeout: raise socket.timeout() select_timeout = self.timeout - elapsed - r, w, x = select( - [self.process.stdout], [], [], select_timeout) + r, w, x = select([self.process.stdout], [], [], select_timeout) if r and r[0] == self.process.stdout: buffer += os.read( - self.process.stdout.fileno(), size - len(buffer)) + self.process.stdout.fileno(), size - len(buffer) + ) return buffer except socket.timeout: if buffer: @@ -102,10 +116,19 @@ def recv(self, size): return buffer raise # socket.timeout is a subclass of IOError except IOError as e: - raise ProxyCommandFailure(' '.join(self.cmd), e.strerror) + raise ProxyCommandFailure(" ".join(self.cmd), e.strerror) def close(self): os.kill(self.process.pid, signal.SIGTERM) + @property + def closed(self): + return self.process.returncode is not None + + @property + def _closed(self): + # Concession to Python 3 socket-like API + return self.closed + def settimeout(self, timeout): self.timeout = timeout diff --git a/paramiko/py3compat.py b/paramiko/py3compat.py index 6fafc31d2..d6d136fbd 100644 --- a/paramiko/py3compat.py +++ b/paramiko/py3compat.py @@ -1,88 +1,95 @@ -import sys import base64 - -__all__ = ['PY2', 'string_types', 'integer_types', 'text_type', 'bytes_types', 'bytes', 'long', 'input', - 'decodebytes', 'encodebytes', 'bytestring', 'byte_ord', 'byte_chr', 'byte_mask', - 'b', 'u', 'b2s', 'StringIO', 'BytesIO', 'is_callable', 'MAXSIZE', 'next', 'builtins'] +import sys +import time + +__all__ = [ + "BytesIO", + "MAXSIZE", + "PY2", + "StringIO", + "b", + "b2s", + "builtins", + "byte_chr", + "byte_mask", + "byte_ord", + "bytes", + "bytes_types", + "decodebytes", + "encodebytes", + "input", + "integer_types", + "is_callable", + "long", + "next", + "string_types", + "text_type", + "u", +] PY2 = sys.version_info[0] < 3 if PY2: - string_types = basestring - text_type = unicode + import __builtin__ as builtins + import locale + + string_types = basestring # NOQA + text_type = unicode # NOQA bytes_types = str bytes = str - integer_types = (int, long) - long = long - input = raw_input + integer_types = (int, long) # NOQA + long = long # NOQA + input = raw_input # NOQA decodebytes = base64.decodestring encodebytes 
= base64.encodestring - import __builtin__ as builtins - - def bytestring(s): # NOQA - if isinstance(s, unicode): - return s.encode('utf-8') + if isinstance(s, unicode): # NOQA + return s.encode("utf-8") return s - byte_ord = ord # NOQA byte_chr = chr # NOQA - def byte_mask(c, mask): return chr(ord(c) & mask) - - def b(s, encoding='utf8'): # NOQA + def b(s, encoding="utf8"): # NOQA """cast unicode or bytes to bytes""" if isinstance(s, str): return s - elif isinstance(s, unicode): + elif isinstance(s, unicode): # NOQA return s.encode(encoding) - elif isinstance(s, buffer): + elif isinstance(s, buffer): # NOQA return s else: - raise TypeError("Expected unicode or bytes, got %r" % s) - + raise TypeError("Expected unicode or bytes, got {!r}".format(s)) - def u(s, encoding='utf8'): # NOQA + def u(s, encoding="utf8"): # NOQA """cast bytes or unicode to unicode""" if isinstance(s, str): return s.decode(encoding) - elif isinstance(s, unicode): + elif isinstance(s, unicode): # NOQA return s - elif isinstance(s, buffer): + elif isinstance(s, buffer): # NOQA return s.decode(encoding) else: - raise TypeError("Expected unicode or bytes, got %r" % s) - + raise TypeError("Expected unicode or bytes, got {!r}".format(s)) def b2s(s): return s + import cStringIO - try: - import cStringIO - - StringIO = cStringIO.StringIO # NOQA - except ImportError: - import StringIO - - StringIO = StringIO.StringIO # NOQA - + StringIO = cStringIO.StringIO BytesIO = StringIO - def is_callable(c): # NOQA return callable(c) - def get_next(c): # NOQA return c.next - def next(c): return c.next() @@ -91,34 +98,40 @@ class X(object): def __len__(self): return 1 << 31 - try: len(X()) except OverflowError: # 32-bit - MAXSIZE = int((1 << 31) - 1) # NOQA + MAXSIZE = int((1 << 31) - 1) # NOQA else: # 64-bit - MAXSIZE = int((1 << 63) - 1) # NOQA + MAXSIZE = int((1 << 63) - 1) # NOQA del X + + def strftime(format, t): + """Same as time.strftime but returns unicode.""" + _, encoding = locale.getlocale(locale.LC_TIME) + return time.strftime(format, t).decode(encoding or "ascii") + + else: import collections import struct import builtins + string_types = str text_type = str bytes = bytes bytes_types = bytes integer_types = int + class long(int): pass + input = input decodebytes = base64.decodebytes encodebytes = base64.encodebytes - def bytestring(s): - return s - def byte_ord(c): # In case we're handed a string instead of an int. 
if not isinstance(c, int): @@ -127,36 +140,37 @@ def byte_ord(c): def byte_chr(c): assert isinstance(c, int) - return struct.pack('B', c) + return struct.pack("B", c) def byte_mask(c, mask): assert isinstance(c, int) - return struct.pack('B', c & mask) + return struct.pack("B", c & mask) - def b(s, encoding='utf8'): + def b(s, encoding="utf8"): """cast unicode or bytes to bytes""" if isinstance(s, bytes): return s elif isinstance(s, str): return s.encode(encoding) else: - raise TypeError("Expected unicode or bytes, got %r" % s) + raise TypeError("Expected unicode or bytes, got {!r}".format(s)) - def u(s, encoding='utf8'): + def u(s, encoding="utf8"): """cast bytes or unicode to unicode""" if isinstance(s, bytes): return s.decode(encoding) elif isinstance(s, str): return s else: - raise TypeError("Expected unicode or bytes, got %r" % s) + raise TypeError("Expected unicode or bytes, got {!r}".format(s)) def b2s(s): return s.decode() if isinstance(s, bytes) else s import io - StringIO = io.StringIO # NOQA - BytesIO = io.BytesIO # NOQA + + StringIO = io.StringIO # NOQA + BytesIO = io.BytesIO # NOQA def is_callable(c): return isinstance(c, collections.Callable) @@ -166,4 +180,6 @@ def get_next(c): next = next - MAXSIZE = sys.maxsize # NOQA + MAXSIZE = sys.maxsize # NOQA + + strftime = time.strftime # NOQA diff --git a/paramiko/resource.py b/paramiko/resource.py deleted file mode 100644 index 9809afbe7..000000000 --- a/paramiko/resource.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (C) 2003-2007 Robey Pointer -# -# This file is part of paramiko. -# -# Paramiko is free software; you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation; either version 2.1 of the License, or (at your option) -# any later version. -# -# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Paramiko; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. - -""" -Resource manager. -""" - -import weakref - - -class ResourceManager (object): - """ - A registry of objects and resources that should be closed when those - objects are deleted. - - This is meant to be a safer alternative to Python's ``__del__`` method, - which can cause reference cycles to never be collected. Objects registered - with the ResourceManager can be collected but still free resources when - they die. - - Resources are registered using `register`, and when an object is garbage - collected, each registered resource is closed by having its ``close()`` - method called. Multiple resources may be registered per object, but a - resource will only be closed once, even if multiple objects register it. - (The last object to register it wins.) - """ - - def __init__(self): - self._table = {} - - def register(self, obj, resource): - """ - Register a resource to be closed with an object is collected. - - When the given ``obj`` is garbage-collected by the Python interpreter, - the ``resource`` will be closed by having its ``close()`` method called. - Any exceptions are ignored. 
- - :param object obj: the object to track - :param object resource: - the resource to close when the object is collected - """ - def callback(ref): - try: - resource.close() - except: - pass - del self._table[id(resource)] - - # keep the weakref in a table so it sticks around long enough to get - # its callback called. :) - self._table[id(resource)] = weakref.ref(obj, callback) - - -# singleton -ResourceManager = ResourceManager() diff --git a/paramiko/rsakey.py b/paramiko/rsakey.py index bc9053f5e..26c5313cc 100644 --- a/paramiko/rsakey.py +++ b/paramiko/rsakey.py @@ -20,13 +20,14 @@ RSA keys. """ -from cryptography.exceptions import InvalidSignature +from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives.asymmetric import rsa, padding from paramiko.message import Message from paramiko.pkey import PKey +from paramiko.py3compat import PY2 from paramiko.ssh_exception import SSHException @@ -36,8 +37,26 @@ class RSAKey(PKey): data. """ - def __init__(self, msg=None, data=None, filename=None, password=None, key=None, file_obj=None): + HASHES = { + "ssh-rsa": hashes.SHA1, + "ssh-rsa-cert-v01@openssh.com": hashes.SHA1, + "rsa-sha2-256": hashes.SHA256, + "rsa-sha2-256-cert-v01@openssh.com": hashes.SHA256, + "rsa-sha2-512": hashes.SHA512, + "rsa-sha2-512-cert-v01@openssh.com": hashes.SHA512, + } + + def __init__( + self, + msg=None, + data=None, + filename=None, + password=None, + key=None, + file_obj=None, + ): self.key = None + self.public_blob = None if file_obj is not None: self._from_private_key(file_obj, password) return @@ -49,10 +68,13 @@ def __init__(self, msg=None, data=None, filename=None, password=None, key=None, if key is not None: self.key = key else: - if msg is None: - raise SSHException('Key object may not be empty') - if msg.get_text() != 'ssh-rsa': - raise SSHException('Invalid key') + self._check_type_and_load_cert( + msg=msg, + # NOTE: this does NOT change when using rsa2 signatures; it's + # purely about key loading, not exchange or verification + key_type="ssh-rsa", + cert_type="ssh-rsa-cert-v01@openssh.com", + ) self.key = rsa.RSAPublicNumbers( e=msg.get_mpint(), n=msg.get_mpint() ).public_key(default_backend()) @@ -70,22 +92,29 @@ def public_numbers(self): def asbytes(self): m = Message() - m.add_string('ssh-rsa') + m.add_string("ssh-rsa") m.add_mpint(self.public_numbers.e) m.add_mpint(self.public_numbers.n) return m.asbytes() def __str__(self): - return self.asbytes() + # NOTE: as per inane commentary in #853, this appears to be the least + # crummy way to get a representation that prints identical to Python + # 2's previous behavior, on both interpreters. 
+ # TODO: replace with a nice clean fingerprint display or something + if PY2: + # Can't just return the .decode below for Py2 because stuff still + # tries stuffing it into ASCII for whatever godforsaken reason + return self.asbytes() + else: + return self.asbytes().decode("utf8", errors="ignore") - def __hash__(self): - h = hash(self.get_name()) - h = h * 37 + hash(self.public_numbers.e) - h = h * 37 + hash(self.public_numbers.n) - return hash(h) + @property + def _fields(self): + return (self.get_name(), self.public_numbers.e, self.public_numbers.n) def get_name(self): - return 'ssh-rsa' + return "ssh-rsa" def get_bits(self): return self.size @@ -93,34 +122,32 @@ def get_bits(self): def can_sign(self): return isinstance(self.key, rsa.RSAPrivateKey) - def sign_ssh_data(self, data): - signer = self.key.signer( + def sign_ssh_data(self, data, algorithm="ssh-rsa"): + sig = self.key.sign( + data, padding=padding.PKCS1v15(), - algorithm=hashes.SHA1(), + algorithm=self.HASHES[algorithm](), ) - signer.update(data) - sig = signer.finalize() - m = Message() - m.add_string('ssh-rsa') + m.add_string(algorithm) m.add_string(sig) return m def verify_ssh_sig(self, data, msg): - if msg.get_text() != 'ssh-rsa': + sig_algorithm = msg.get_text() + if sig_algorithm not in self.HASHES: return False key = self.key if isinstance(key, rsa.RSAPrivateKey): key = key.public_key() - verifier = key.verifier( - signature=msg.get_binary(), - padding=padding.PKCS1v15(), - algorithm=hashes.SHA1(), - ) - verifier.update(data) try: - verifier.verify() + key.verify( + msg.get_binary(), + data, + padding.PKCS1v15(), + self.HASHES[sig_algorithm](), + ) except InvalidSignature: return False else: @@ -131,7 +158,7 @@ def write_private_key_file(self, filename, password=None): filename, self.key, serialization.PrivateFormat.TraditionalOpenSSL, - password=password + password=password, ) def write_private_key(self, file_obj, password=None): @@ -139,7 +166,7 @@ def write_private_key(self, file_obj, password=None): file_obj, self.key, serialization.PrivateFormat.TraditionalOpenSSL, - password=password + password=password, ) @staticmethod @@ -149,7 +176,7 @@ def generate(bits, progress_func=None): generate a new host key or authentication key. :param int bits: number of bits the generated key should be. - :param function progress_func: Unused + :param progress_func: Unused :return: new `.RSAKey` private key """ key = rsa.generate_private_key( @@ -157,23 +184,38 @@ def generate(bits, progress_func=None): ) return RSAKey(key=key) - ### internals... + # ...internals... 
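The sign/verify methods above are thin wrappers over the `cryptography` primitives named in the HASHES table. A standalone sketch of the same PKCS#1 v1.5 flow, using a freshly generated key rather than paramiko's own classes:

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa

    key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    data = b"payload to be signed"
    # rsa-sha2-256 == PKCS#1 v1.5 padding + SHA-256, per the HASHES table above
    sig = key.sign(data, padding.PKCS1v15(), hashes.SHA256())
    try:
        key.public_key().verify(sig, data, padding.PKCS1v15(), hashes.SHA256())
    except InvalidSignature:
        print("signature rejected")
    else:
        print("signature ok")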
def _from_private_key_file(self, filename, password): - data = self._read_private_key_file('RSA', filename, password) + data = self._read_private_key_file("RSA", filename, password) self._decode_key(data) def _from_private_key(self, file_obj, password): - data = self._read_private_key('RSA', file_obj, password) + data = self._read_private_key("RSA", file_obj, password) self._decode_key(data) def _decode_key(self, data): - try: - key = serialization.load_der_private_key( - data, password=None, backend=default_backend() - ) - except ValueError as e: - raise SSHException(str(e)) - + pkformat, data = data + if pkformat == self._PRIVATE_KEY_FORMAT_ORIGINAL: + try: + key = serialization.load_der_private_key( + data, password=None, backend=default_backend() + ) + except (ValueError, TypeError, UnsupportedAlgorithm) as e: + raise SSHException(str(e)) + elif pkformat == self._PRIVATE_KEY_FORMAT_OPENSSH: + n, e, d, iqmp, p, q = self._uint32_cstruct_unpack(data, "iiiiii") + public_numbers = rsa.RSAPublicNumbers(e=e, n=n) + key = rsa.RSAPrivateNumbers( + p=p, + q=q, + d=d, + dmp1=d % (p - 1), + dmq1=d % (q - 1), + iqmp=iqmp, + public_numbers=public_numbers, + ).private_key(default_backend()) + else: + self._got_bad_key_format_id(pkformat) assert isinstance(key, rsa.RSAPrivateKey) self.key = key diff --git a/paramiko/server.py b/paramiko/server.py index f79a17484..2fe9cc192 100644 --- a/paramiko/server.py +++ b/paramiko/server.py @@ -22,11 +22,17 @@ import threading from paramiko import util -from paramiko.common import DEBUG, ERROR, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, AUTH_FAILED, AUTH_SUCCESSFUL +from paramiko.common import ( + DEBUG, + ERROR, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + AUTH_FAILED, + AUTH_SUCCESSFUL, +) from paramiko.py3compat import string_types -class ServerInterface (object): +class ServerInterface(object): """ This class defines an interface for controlling the behavior of Paramiko in server mode. @@ -69,7 +75,7 @@ def check_channel_request(self, kind, chanid): - ``OPEN_FAILED_CONNECT_FAILED`` - ``OPEN_FAILED_UNKNOWN_CHANNEL_TYPE`` - ``OPEN_FAILED_RESOURCE_SHORTAGE`` - + The default implementation always returns ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``. @@ -96,22 +102,22 @@ def get_allowed_auths(self, username): :param str username: the username requesting authentication. :return: a comma-separated `str` of authentication types """ - return 'password' + return "password" def check_auth_none(self, username): """ Determine if a client may open channels with no (further) authentication. - Return `.AUTH_FAILED` if the client must authenticate, or - `.AUTH_SUCCESSFUL` if it's okay for the client to not + Return ``AUTH_FAILED`` if the client must authenticate, or + ``AUTH_SUCCESSFUL`` if it's okay for the client to not authenticate. - The default implementation always returns `.AUTH_FAILED`. + The default implementation always returns ``AUTH_FAILED``. :param str username: the username of the client. :return: - `.AUTH_FAILED` if the authentication fails; `.AUTH_SUCCESSFUL` if + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if it succeeds. :rtype: int """ @@ -122,21 +128,21 @@ def check_auth_password(self, username, password): Determine if a given username and password supplied by the client is acceptable for use in authentication. 
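The OpenSSH-format branch of `_decode_key` shown above rebuilds the private key from raw integers, deriving the CRT exponents as `d % (p - 1)` and `d % (q - 1)`. A self-contained sketch of that reconstruction, round-tripping a freshly generated key instead of parsing a real OpenSSH blob:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import rsa

    original = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    nums = original.private_numbers()
    p, q, d = nums.p, nums.q, nums.d
    rebuilt = rsa.RSAPrivateNumbers(
        p=p,
        q=q,
        d=d,
        dmp1=d % (p - 1),  # same derivation as _decode_key
        dmq1=d % (q - 1),
        iqmp=nums.iqmp,
        public_numbers=nums.public_numbers,
    ).private_key(default_backend())
    assert isinstance(rebuilt, rsa.RSAPrivateKey)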
- Return `.AUTH_FAILED` if the password is not accepted, - `.AUTH_SUCCESSFUL` if the password is accepted and completes - the authentication, or `.AUTH_PARTIALLY_SUCCESSFUL` if your + Return ``AUTH_FAILED`` if the password is not accepted, + ``AUTH_SUCCESSFUL`` if the password is accepted and completes + the authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your authentication is stateful, and this key is accepted for authentication, but more authentication is required. (In this latter case, `get_allowed_auths` will be called to report to the client what options it has for continuing the authentication.) - The default implementation always returns `.AUTH_FAILED`. + The default implementation always returns ``AUTH_FAILED``. :param str username: the username of the authenticating client. :param str password: the password given by the client. :return: - `.AUTH_FAILED` if the authentication fails; `.AUTH_SUCCESSFUL` if - it succeeds; `.AUTH_PARTIALLY_SUCCESSFUL` if the password auth is + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if + it succeeds; ``AUTH_PARTIALLY_SUCCESSFUL`` if the password auth is successful, but authentication must continue. :rtype: int """ @@ -149,9 +155,9 @@ def check_auth_publickey(self, username, key): check the username and key and decide if you would accept a signature made using this key. - Return `.AUTH_FAILED` if the key is not accepted, - `.AUTH_SUCCESSFUL` if the key is accepted and completes the - authentication, or `.AUTH_PARTIALLY_SUCCESSFUL` if your + Return ``AUTH_FAILED`` if the key is not accepted, + ``AUTH_SUCCESSFUL`` if the key is accepted and completes the + authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your authentication is stateful, and this password is accepted for authentication, but more authentication is required. (In this latter case, `get_allowed_auths` will be called to report to the client what @@ -160,54 +166,54 @@ def check_auth_publickey(self, username, key): Note that you don't have to actually verify any key signtature here. If you're willing to accept the key, Paramiko will do the work of verifying the client's signature. - - The default implementation always returns `.AUTH_FAILED`. + + The default implementation always returns ``AUTH_FAILED``. :param str username: the username of the authenticating client :param .PKey key: the key object provided by the client :return: - `.AUTH_FAILED` if the client can't authenticate with this key; - `.AUTH_SUCCESSFUL` if it can; `.AUTH_PARTIALLY_SUCCESSFUL` if it + ``AUTH_FAILED`` if the client can't authenticate with this key; + ``AUTH_SUCCESSFUL`` if it can; ``AUTH_PARTIALLY_SUCCESSFUL`` if it can authenticate with this key but must continue with authentication :rtype: int """ return AUTH_FAILED - + def check_auth_interactive(self, username, submethods): """ Begin an interactive authentication challenge, if supported. You should override this method in server mode if you want to support the ``"keyboard-interactive"`` auth type, which requires you to send a series of questions for the client to answer. - - Return `.AUTH_FAILED` if this auth method isn't supported. Otherwise, + + Return ``AUTH_FAILED`` if this auth method isn't supported. Otherwise, you should return an `.InteractiveQuery` object containing the prompts and instructions for the user. The response will be sent via a call to `check_auth_interactive_response`. - - The default implementation always returns `.AUTH_FAILED`. - + + The default implementation always returns ``AUTH_FAILED``. 
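A hedged sketch of how these auth hooks are typically overridden; the username/password comparison is a stand-in, and a real server would consult its own credential store:

    import paramiko

    class DemoServer(paramiko.ServerInterface):
        def get_allowed_auths(self, username):
            return "password,publickey"

        def check_auth_password(self, username, password):
            if username == "demo" and password == "secret":  # placeholder check
                return paramiko.AUTH_SUCCESSFUL
            return paramiko.AUTH_FAILED

        def check_auth_publickey(self, username, key):
            # Paramiko verifies the signature itself; this hook only decides
            # whether the offered key is acceptable for this user.
            return paramiko.AUTH_FAILED

        def check_channel_request(self, kind, chanid):
            if kind == "session":
                return paramiko.OPEN_SUCCEEDED
            return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED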
+ :param str username: the username of the authenticating client :param str submethods: a comma-separated list of methods preferred by the client (usually empty) :return: - `.AUTH_FAILED` if this auth method isn't supported; otherwise an + ``AUTH_FAILED`` if this auth method isn't supported; otherwise an object containing queries for the user :rtype: int or `.InteractiveQuery` """ return AUTH_FAILED - + def check_auth_interactive_response(self, responses): """ Continue or finish an interactive authentication challenge, if supported. You should override this method in server mode if you want to support the ``"keyboard-interactive"`` auth type. - - Return `.AUTH_FAILED` if the responses are not accepted, - `.AUTH_SUCCESSFUL` if the responses are accepted and complete - the authentication, or `.AUTH_PARTIALLY_SUCCESSFUL` if your + + Return ``AUTH_FAILED`` if the responses are not accepted, + ``AUTH_SUCCESSFUL`` if the responses are accepted and complete + the authentication, or ``AUTH_PARTIALLY_SUCCESSFUL`` if your authentication is stateful, and this set of responses is accepted for authentication, but more authentication is required. (In this latter case, `get_allowed_auths` will be called to report to the client what @@ -218,21 +224,21 @@ def check_auth_interactive_response(self, responses): client to respond with more answers, calling this method again. This cycle can continue indefinitely. - The default implementation always returns `.AUTH_FAILED`. + The default implementation always returns ``AUTH_FAILED``. - :param list responses: list of `str` responses from the client + :param responses: list of `str` responses from the client :return: - `.AUTH_FAILED` if the authentication fails; `.AUTH_SUCCESSFUL` if - it succeeds; `.AUTH_PARTIALLY_SUCCESSFUL` if the interactive auth + ``AUTH_FAILED`` if the authentication fails; ``AUTH_SUCCESSFUL`` if + it succeeds; ``AUTH_PARTIALLY_SUCCESSFUL`` if the interactive auth is successful, but authentication must continue; otherwise an object containing queries for the user :rtype: int or `.InteractiveQuery` """ return AUTH_FAILED - def check_auth_gssapi_with_mic(self, username, - gss_authenticated=AUTH_FAILED, - cc_file=None): + def check_auth_gssapi_with_mic( + self, username, gss_authenticated=AUTH_FAILED, cc_file=None + ): """ Authenticate the given user to the server if he is a valid krb5 principal. @@ -240,8 +246,8 @@ def check_auth_gssapi_with_mic(self, username, :param str username: The username of the authenticating client :param int gss_authenticated: The result of the krb5 authentication :param str cc_filename: The krb5 client credentials cache filename - :return: `.AUTH_FAILED` if the user is not authenticated otherwise - `.AUTH_SUCCESSFUL` + :return: ``AUTH_FAILED`` if the user is not authenticated otherwise + ``AUTH_SUCCESSFUL`` :rtype: int :note: Kerberos credential delegation is not supported. :see: `.ssh_gss` @@ -250,18 +256,19 @@ def check_auth_gssapi_with_mic(self, username, We don't check if the krb5 principal is allowed to log in on the server, because there is no way to do that in python. So if you develop your own SSH server with paramiko for a cetain - plattform like Linux, you should call C{krb5_kuserok()} in your - local kerberos library to make sure that the krb5_principal has - an account on the server and is allowed to log in as a user. 
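For the keyboard-interactive flow described above, a sketch pairing `check_auth_interactive` with `check_auth_interactive_response`; the prompt text and expected answer are placeholders:

    import paramiko
    from paramiko.server import InteractiveQuery

    class InteractiveDemo(paramiko.ServerInterface):
        def get_allowed_auths(self, username):
            return "keyboard-interactive"

        def check_auth_interactive(self, username, submethods):
            query = InteractiveQuery("Login", "Answer the challenge to continue")
            query.add_prompt("One-time code: ", echo=False)
            return query

        def check_auth_interactive_response(self, responses):
            if responses and responses[0] == "424242":  # placeholder check
                return paramiko.AUTH_SUCCESSFUL
            return paramiko.AUTH_FAILED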
- :see: `http://www.unix.com/man-page/all/3/krb5_kuserok/` + plattform like Linux, you should call C{krb5_kuserok()} in + your local kerberos library to make sure that the + krb5_principal has an account on the server and is allowed to + log in as a user. + :see: http://www.unix.com/man-page/all/3/krb5_kuserok/ """ if gss_authenticated == AUTH_SUCCESSFUL: return AUTH_SUCCESSFUL return AUTH_FAILED - def check_auth_gssapi_keyex(self, username, - gss_authenticated=AUTH_FAILED, - cc_file=None): + def check_auth_gssapi_keyex( + self, username, gss_authenticated=AUTH_FAILED, cc_file=None + ): """ Authenticate the given user to the server if he is a valid krb5 principal and GSS-API Key Exchange was performed. @@ -271,20 +278,21 @@ def check_auth_gssapi_keyex(self, username, :param str username: The username of the authenticating client :param int gss_authenticated: The result of the krb5 authentication :param str cc_filename: The krb5 client credentials cache filename - :return: `.AUTH_FAILED` if the user is not authenticated otherwise - `.AUTH_SUCCESSFUL` + :return: ``AUTH_FAILED`` if the user is not authenticated otherwise + ``AUTH_SUCCESSFUL`` :rtype: int :note: Kerberos credential delegation is not supported. - :see: `.ssh_gss` `.kex_gss` + :see: `.ssh_gss` `.kex_gss` :note: : We are just checking in L{AuthHandler} that the given user is a valid krb5 principal! We don't check if the krb5 principal is allowed to log in on the server, because there is no way to do that in python. So if you develop your own SSH server with paramiko for a cetain - plattform like Linux, you should call C{krb5_kuserok()} in your - local kerberos library to make sure that the krb5_principal has - an account on the server and is allowed to log in as a user. - :see: `http://www.unix.com/man-page/all/3/krb5_kuserok/` + plattform like Linux, you should call C{krb5_kuserok()} in + your local kerberos library to make sure that the + krb5_principal has an account on the server and is allowed + to log in as a user. + :see: http://www.unix.com/man-page/all/3/krb5_kuserok/ """ if gss_authenticated == AUTH_SUCCESSFUL: return AUTH_SUCCESSFUL @@ -296,14 +304,12 @@ def enable_auth_gssapi(self): authentication. The default implementation always returns false. - :return: True if GSSAPI authentication is enabled otherwise false - :rtype: Boolean - :see: : `.ssh_gss` + :returns bool: Whether GSSAPI authentication is enabled. + :see: `.ssh_gss` """ UseGSSAPI = False - GSSAPICleanupCredentials = False return UseGSSAPI - + def check_port_forward_request(self, address, port): """ Handle a request for port forwarding. The client is asking that @@ -312,11 +318,11 @@ def check_port_forward_request(self, address, port): address (any address associated with this server) and a port of ``0`` indicates that no specific port is requested (usually the OS will pick a port). - + The default implementation always returns ``False``, rejecting the port forwarding request. If the request is accepted, you should return the port opened for listening. - + :param str address: the requested address :param int port: the requested port :return: @@ -324,18 +330,18 @@ def check_port_forward_request(self, address, port): to reject """ return False - + def cancel_port_forward_request(self, address, port): """ The client would like to cancel a previous port-forwarding request. If the given address and port is being forwarded across this ssh connection, the port should be closed. 
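A sketch of accepting remote port-forward requests per the docstrings above: bind a listener, return the bound port (covering the ``port == 0`` "pick one for me" case), and tear it down again on cancel. The listener bookkeeping is illustrative only.

    import socket
    import paramiko

    class ForwardingServer(paramiko.ServerInterface):
        def __init__(self):
            self._listeners = {}

        def check_port_forward_request(self, address, port):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.bind((address or "", port))
            sock.listen(5)
            bound_port = sock.getsockname()[1]
            self._listeners[(address, bound_port)] = sock
            return bound_port

        def cancel_port_forward_request(self, address, port):
            listener = self._listeners.pop((address, port), None)
            if listener is not None:
                listener.close()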
- + :param str address: the forwarded address :param int port: the forwarded port """ pass - + def check_global_request(self, kind, msg): """ Handle a global request of the given ``kind``. This method is called @@ -354,7 +360,7 @@ def check_global_request(self, kind, msg): The default implementation always returns ``False``, indicating that it does not support any global requests. - + .. note:: Port forwarding requests are handled separately, in `check_port_forward_request`. @@ -366,10 +372,11 @@ def check_global_request(self, kind, msg): """ return False - ### Channel requests + # ...Channel requests... - def check_channel_pty_request(self, channel, term, width, height, pixelwidth, pixelheight, - modes): + def check_channel_pty_request( + self, channel, term, width, height, pixelwidth, pixelheight, modes + ): """ Determine if a pseudo-terminal of the given dimensions (usually requested for shell access) can be provided on the given channel. @@ -385,7 +392,7 @@ def check_channel_pty_request(self, channel, term, width, height, pixelwidth, pi :param int pixelheight: height of screen in pixels, if known (may be ``0`` if unknown). :return: - ``True`` if the psuedo-terminal has been allocated; ``False`` + ``True`` if the pseudo-terminal has been allocated; ``False`` otherwise. """ return False @@ -411,20 +418,20 @@ def check_channel_exec_request(self, channel, command): Determine if a shell command will be executed for the client. If this method returns ``True``, the channel should be connected to the stdin, stdout, and stderr of the shell command. - + The default implementation always returns ``False``. - + :param .Channel channel: the `.Channel` the request arrived on. :param str command: the command to execute. :return: ``True`` if this channel is now hooked up to the stdin, stdout, and stderr of the executing command; ``False`` if the command will not be executed. - + .. versionadded:: 1.1 """ return False - + def check_channel_subsystem_request(self, channel, name): """ Determine if a requested subsystem will be provided to the client on @@ -447,14 +454,17 @@ def check_channel_subsystem_request(self, channel, name): ``True`` if this channel is now hooked up to the requested subsystem; ``False`` if that subsystem can't or won't be provided. """ - handler_class, larg, kwarg = channel.get_transport()._get_subsystem_handler(name) + transport = channel.get_transport() + handler_class, larg, kwarg = transport._get_subsystem_handler(name) if handler_class is None: return False handler = handler_class(channel, name, self, *larg, **kwarg) handler.start() return True - def check_channel_window_change_request(self, channel, width, height, pixelwidth, pixelheight): + def check_channel_window_change_request( + self, channel, width, height, pixelwidth, pixelheight + ): """ Determine if the pseudo-terminal on the given channel can be resized. This only makes sense if a pty was previously allocated on it. @@ -471,15 +481,22 @@ def check_channel_window_change_request(self, channel, width, height, pixelwidth :return: ``True`` if the terminal was resized; ``False`` if not. """ return False - - def check_channel_x11_request(self, channel, single_connection, auth_protocol, auth_cookie, screen_number): + + def check_channel_x11_request( + self, + channel, + single_connection, + auth_protocol, + auth_cookie, + screen_number, + ): """ Determine if the client will be provided with an X11 session. 
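Sketch of granting a pty and a single exec request through the channel hooks above; actually spawning the command and pumping its stdio through the channel is left out.

    import paramiko

    class ExecServer(paramiko.ServerInterface):
        def check_channel_request(self, kind, chanid):
            if kind == "session":
                return paramiko.OPEN_SUCCEEDED
            return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED

        def check_channel_pty_request(
            self, channel, term, width, height, pixelwidth, pixelheight, modes
        ):
            return True

        def check_channel_exec_request(self, channel, command):
            # Accept the request; a real server would now run `command` in
            # another thread and wire its stdin/stdout/stderr to `channel`.
            return True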
If this method returns ``True``, X11 applications should be routed through new SSH channels, using `.Transport.open_x11_channel`. - + The default implementation always returns ``False``. - + :param .Channel channel: the `.Channel` the X11 request arrived on :param bool single_connection: ``True`` if only a single X11 channel should be opened, else @@ -529,7 +546,7 @@ def check_channel_direct_tcpip_request(self, chanid, origin, destination): - ``OPEN_FAILED_CONNECT_FAILED`` - ``OPEN_FAILED_UNKNOWN_CHANNEL_TYPE`` - ``OPEN_FAILED_RESOURCE_SHORTAGE`` - + The default implementation always returns ``OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED``. @@ -562,19 +579,33 @@ def check_channel_env_request(self, channel, name, value): """ return False + def get_banner(self): + """ + A pre-login banner to display to the user. The message may span + multiple lines separated by crlf pairs. The language should be in + rfc3066 style, for example: en-US + + The default implementation always returns ``(None, None)``. + + :returns: A tuple containing the banner and language code. -class InteractiveQuery (object): + .. versionadded:: 2.3 + """ + return (None, None) + + +class InteractiveQuery(object): """ A query (set of prompts) for a user during interactive authentication. """ - - def __init__(self, name='', instructions='', *prompts): + + def __init__(self, name="", instructions="", *prompts): """ Create a new interactive query to send to the client. The name and instructions are optional, but are generally displayed to the end user. A list of prompts may be included, or they may be added via the `add_prompt` method. - + :param str name: name of this query :param str instructions: user instructions (usually short) about this query @@ -588,12 +619,12 @@ def __init__(self, name='', instructions='', *prompts): self.add_prompt(x) else: self.add_prompt(x[0], x[1]) - + def add_prompt(self, prompt, echo=True): """ Add a prompt to this query. The prompt should be a (reasonably short) string. Multiple prompts can be added to the same query. - + :param str prompt: the user prompt :param bool echo: ``True`` (default) if the user's response should be echoed; @@ -602,7 +633,7 @@ def add_prompt(self, prompt, echo=True): self.prompts.append((prompt, echo)) -class SubsystemHandler (threading.Thread): +class SubsystemHandler(threading.Thread): """ Handler for a subsytem in server mode. If you create a subclass of this class and pass it to `.Transport.set_subsystem_handler`, an object of this @@ -616,15 +647,17 @@ class will be created for each request for this subsystem. Each new object ``MP3Handler`` will be created, and `start_subsystem` will be called on it from a new thread. """ + def __init__(self, channel, name, server): """ Create a new handler for a channel. This is used by `.ServerInterface` to start up a new handler when a channel requests this subsystem. You don't need to override this method, but if you do, be sure to pass the - ``channel`` and ``name`` parameters through to the original ``__init__`` - method here. + ``channel`` and ``name`` parameters through to the original + ``__init__`` method here. - :param .Channel channel: the channel associated with this subsystem request. + :param .Channel channel: the channel associated with this + subsystem request. :param str name: name of the requested subsystem. 
:param .ServerInterface server: the server object for the session that started this subsystem @@ -634,7 +667,7 @@ def __init__(self, channel, name, server): self.__transport = channel.get_transport() self.__name = name self.__server = server - + def get_server(self): """ Return the `.ServerInterface` object associated with this channel and @@ -644,11 +677,17 @@ def get_server(self): def _run(self): try: - self.__transport._log(DEBUG, 'Starting handler for subsystem %s' % self.__name) + self.__transport._log( + DEBUG, "Starting handler for subsystem {}".format(self.__name) + ) self.start_subsystem(self.__name, self.__transport, self.__channel) except Exception as e: - self.__transport._log(ERROR, 'Exception in subsystem handler for "%s": %s' % - (self.__name, str(e))) + self.__transport._log( + ERROR, + 'Exception in subsystem handler for "{}": {}'.format( + self.__name, e + ), + ) self.__transport._log(ERROR, util.tb_strings()) try: self.finish_subsystem() @@ -663,8 +702,8 @@ def start_subsystem(self, name, transport, channel): subsystem is finished, this method will return. After this method returns, the channel is closed. - The combination of ``transport`` and ``channel`` are unique; this handler - corresponds to exactly one `.Channel` on one `.Transport`. + The combination of ``transport`` and ``channel`` are unique; this + handler corresponds to exactly one `.Channel` on one `.Transport`. .. note:: It is the responsibility of this method to exit if the underlying @@ -676,7 +715,8 @@ def start_subsystem(self, name, transport, channel): :param str name: name of the requested subsystem. :param .Transport transport: the server-mode `.Transport`. - :param .Channel channel: the channel associated with this subsystem request. + :param .Channel channel: the channel associated with this subsystem + request. 
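A minimal handler in the spirit of the docstring's hypothetical example: echo client data back until the channel closes. The subsystem name used for registration is made up.

    import paramiko

    class EchoHandler(paramiko.SubsystemHandler):
        def start_subsystem(self, name, transport, channel):
            while True:
                data = channel.recv(4096)
                if not data:  # channel closed by the client
                    break
                channel.send(data)

    # Registration (hypothetical subsystem name):
    # transport.set_subsystem_handler("echo", EchoHandler)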
""" pass diff --git a/paramiko/sftp.py b/paramiko/sftp.py index f44a804d9..25debc85f 100644 --- a/paramiko/sftp.py +++ b/paramiko/sftp.py @@ -26,25 +26,54 @@ from paramiko.py3compat import byte_chr, byte_ord -CMD_INIT, CMD_VERSION, CMD_OPEN, CMD_CLOSE, CMD_READ, CMD_WRITE, CMD_LSTAT, CMD_FSTAT, \ - CMD_SETSTAT, CMD_FSETSTAT, CMD_OPENDIR, CMD_READDIR, CMD_REMOVE, CMD_MKDIR, \ - CMD_RMDIR, CMD_REALPATH, CMD_STAT, CMD_RENAME, CMD_READLINK, CMD_SYMLINK = range(1, 21) -CMD_STATUS, CMD_HANDLE, CMD_DATA, CMD_NAME, CMD_ATTRS = range(101, 106) -CMD_EXTENDED, CMD_EXTENDED_REPLY = range(200, 202) +( + CMD_INIT, + CMD_VERSION, + CMD_OPEN, + CMD_CLOSE, + CMD_READ, + CMD_WRITE, + CMD_LSTAT, + CMD_FSTAT, + CMD_SETSTAT, + CMD_FSETSTAT, + CMD_OPENDIR, + CMD_READDIR, + CMD_REMOVE, + CMD_MKDIR, + CMD_RMDIR, + CMD_REALPATH, + CMD_STAT, + CMD_RENAME, + CMD_READLINK, + CMD_SYMLINK, +) = range(1, 21) +(CMD_STATUS, CMD_HANDLE, CMD_DATA, CMD_NAME, CMD_ATTRS) = range(101, 106) +(CMD_EXTENDED, CMD_EXTENDED_REPLY) = range(200, 202) SFTP_OK = 0 -SFTP_EOF, SFTP_NO_SUCH_FILE, SFTP_PERMISSION_DENIED, SFTP_FAILURE, SFTP_BAD_MESSAGE, \ - SFTP_NO_CONNECTION, SFTP_CONNECTION_LOST, SFTP_OP_UNSUPPORTED = range(1, 9) - -SFTP_DESC = ['Success', - 'End of file', - 'No such file', - 'Permission denied', - 'Failure', - 'Bad message', - 'No connection', - 'Connection lost', - 'Operation unsupported'] +( + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, + SFTP_FAILURE, + SFTP_BAD_MESSAGE, + SFTP_NO_CONNECTION, + SFTP_CONNECTION_LOST, + SFTP_OP_UNSUPPORTED, +) = range(1, 9) + +SFTP_DESC = [ + "Success", + "End of file", + "No such file", + "Permission denied", + "Failure", + "Bad message", + "No connection", + "Connection lost", + "Operation unsupported", +] SFTP_FLAG_READ = 0x1 SFTP_FLAG_WRITE = 0x2 @@ -58,54 +87,54 @@ # for debugging CMD_NAMES = { - CMD_INIT: 'init', - CMD_VERSION: 'version', - CMD_OPEN: 'open', - CMD_CLOSE: 'close', - CMD_READ: 'read', - CMD_WRITE: 'write', - CMD_LSTAT: 'lstat', - CMD_FSTAT: 'fstat', - CMD_SETSTAT: 'setstat', - CMD_FSETSTAT: 'fsetstat', - CMD_OPENDIR: 'opendir', - CMD_READDIR: 'readdir', - CMD_REMOVE: 'remove', - CMD_MKDIR: 'mkdir', - CMD_RMDIR: 'rmdir', - CMD_REALPATH: 'realpath', - CMD_STAT: 'stat', - CMD_RENAME: 'rename', - CMD_READLINK: 'readlink', - CMD_SYMLINK: 'symlink', - CMD_STATUS: 'status', - CMD_HANDLE: 'handle', - CMD_DATA: 'data', - CMD_NAME: 'name', - CMD_ATTRS: 'attrs', - CMD_EXTENDED: 'extended', - CMD_EXTENDED_REPLY: 'extended_reply' + CMD_INIT: "init", + CMD_VERSION: "version", + CMD_OPEN: "open", + CMD_CLOSE: "close", + CMD_READ: "read", + CMD_WRITE: "write", + CMD_LSTAT: "lstat", + CMD_FSTAT: "fstat", + CMD_SETSTAT: "setstat", + CMD_FSETSTAT: "fsetstat", + CMD_OPENDIR: "opendir", + CMD_READDIR: "readdir", + CMD_REMOVE: "remove", + CMD_MKDIR: "mkdir", + CMD_RMDIR: "rmdir", + CMD_REALPATH: "realpath", + CMD_STAT: "stat", + CMD_RENAME: "rename", + CMD_READLINK: "readlink", + CMD_SYMLINK: "symlink", + CMD_STATUS: "status", + CMD_HANDLE: "handle", + CMD_DATA: "data", + CMD_NAME: "name", + CMD_ATTRS: "attrs", + CMD_EXTENDED: "extended", + CMD_EXTENDED_REPLY: "extended_reply", } -class SFTPError (Exception): +class SFTPError(Exception): pass -class BaseSFTP (object): +class BaseSFTP(object): def __init__(self): - self.logger = util.get_logger('paramiko.sftp') + self.logger = util.get_logger("paramiko.sftp") self.sock = None self.ultra_debug = False - ### internals... + # ...internals... 
def _send_version(self): - self._send_packet(CMD_INIT, struct.pack('>I', _VERSION)) + self._send_packet(CMD_INIT, struct.pack(">I", _VERSION)) t, data = self._read_packet() if t != CMD_VERSION: - raise SFTPError('Incompatible sftp protocol') - version = struct.unpack('>I', data[:4])[0] + raise SFTPError("Incompatible sftp protocol") + version = struct.unpack(">I", data[:4])[0] # if version != _VERSION: # raise SFTPError('Incompatible sftp protocol') return version @@ -115,16 +144,16 @@ def _send_server_version(self): # client finishes sending INIT. t, data = self._read_packet() if t != CMD_INIT: - raise SFTPError('Incompatible sftp protocol') - version = struct.unpack('>I', data[:4])[0] + raise SFTPError("Incompatible sftp protocol") + version = struct.unpack(">I", data[:4])[0] # advertise that we support "check-file" - extension_pairs = ['check-file', 'md5,sha1'] + extension_pairs = ["check-file", "md5,sha1"] msg = Message() msg.add_int(_VERSION) msg.add(*extension_pairs) self._send_packet(CMD_VERSION, msg) return version - + def _log(self, level, msg, *args): self.logger.log(level, msg, *args) @@ -154,7 +183,7 @@ def _read_all(self, n): break else: x = self.sock.recv(n) - + if len(x) == 0: raise EOFError() out += x @@ -162,11 +191,10 @@ def _read_all(self, n): return out def _send_packet(self, t, packet): - #self._log(DEBUG2, 'write: %s (len=%d)' % (CMD_NAMES.get(t, '0x%02x' % t), len(packet))) packet = asbytes(packet) - out = struct.pack('>I', len(packet) + 1) + byte_chr(t) + packet + out = struct.pack(">I", len(packet) + 1) + byte_chr(t) + packet if self.ultra_debug: - self._log(DEBUG, util.format_binary(out, 'OUT: ')) + self._log(DEBUG, util.format_binary(out, "OUT: ")) self._write_all(out) def _read_packet(self): @@ -174,13 +202,12 @@ def _read_packet(self): # most sftp servers won't accept packets larger than about 32k, so # anything with the high byte set (> 16MB) is just garbage. if byte_ord(x[0]): - raise SFTPError('Garbage packet received') - size = struct.unpack('>I', x)[0] + raise SFTPError("Garbage packet received") + size = struct.unpack(">I", x)[0] data = self._read_all(size) if self.ultra_debug: - self._log(DEBUG, util.format_binary(data, 'IN: ')) + self._log(DEBUG, util.format_binary(data, "IN: ")) if size > 0: t = byte_ord(data[0]) - #self._log(DEBUG2, 'read: %s (len=%d)' % (CMD_NAMES.get(t), '0x%02x' % t, len(data)-1)) return t, data[1:] return 0, bytes() diff --git a/paramiko/sftp_attr.py b/paramiko/sftp_attr.py index 0eaca30b7..8b1c17bd5 100644 --- a/paramiko/sftp_attr.py +++ b/paramiko/sftp_attr.py @@ -19,10 +19,10 @@ import stat import time from paramiko.common import x80000000, o700, o70, xffffffff -from paramiko.py3compat import long, b +from paramiko.py3compat import long, PY2, strftime -class SFTPAttributes (object): +class SFTPAttributes(object): """ Representation of the attributes of a file (or proxied file) for SFTP in client or server mode. It attemps to mirror the object returned by @@ -82,9 +82,9 @@ def from_stat(cls, obj, filename=None): return attr def __repr__(self): - return '' % self._debug_str() + return "".format(self._debug_str()) - ### internals... + # ...internals... 
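The ``_send_packet``/``_read_packet`` helpers above frame every message as a 4-byte big-endian length (covering the type byte plus payload), then the type byte, then the payload. A standalone restatement of that framing:

    import struct

    def frame(packet_type, payload):
        return struct.pack(">I", len(payload) + 1) + struct.pack("B", packet_type) + payload

    def unframe(data):
        size = struct.unpack(">I", data[:4])[0]
        body = data[4:4 + size]
        return body[0], body[1:]  # (type, payload); body[0] is an int on Python 3

    packet = frame(1, struct.pack(">I", 3))  # CMD_INIT advertising version 3
    assert unframe(packet) == (1, struct.pack(">I", 3))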
@classmethod def _from_msg(cls, msg, filename=None, longname=None): attr = cls() @@ -144,67 +144,74 @@ def _pack(self, msg): return def _debug_str(self): - out = '[ ' + out = "[ " if self.st_size is not None: - out += 'size=%d ' % self.st_size + out += "size={} ".format(self.st_size) if (self.st_uid is not None) and (self.st_gid is not None): - out += 'uid=%d gid=%d ' % (self.st_uid, self.st_gid) + out += "uid={} gid={} ".format(self.st_uid, self.st_gid) if self.st_mode is not None: - out += 'mode=' + oct(self.st_mode) + ' ' + out += "mode=" + oct(self.st_mode) + " " if (self.st_atime is not None) and (self.st_mtime is not None): - out += 'atime=%d mtime=%d ' % (self.st_atime, self.st_mtime) + out += "atime={} mtime={} ".format(self.st_atime, self.st_mtime) for k, v in self.attr.items(): - out += '"%s"=%r ' % (str(k), v) - out += ']' + out += '"{}"={!r} '.format(str(k), v) + out += "]" return out @staticmethod def _rwx(n, suid, sticky=False): if suid: suid = 2 - out = '-r'[n >> 2] + '-w'[(n >> 1) & 1] + out = "-r"[n >> 2] + "-w"[(n >> 1) & 1] if sticky: - out += '-xTt'[suid + (n & 1)] + out += "-xTt"[suid + (n & 1)] else: - out += '-xSs'[suid + (n & 1)] + out += "-xSs"[suid + (n & 1)] return out - def __str__(self): + def _as_text(self): """create a unix-style long description of the file (like ls -l)""" if self.st_mode is not None: kind = stat.S_IFMT(self.st_mode) if kind == stat.S_IFIFO: - ks = 'p' + ks = "p" elif kind == stat.S_IFCHR: - ks = 'c' + ks = "c" elif kind == stat.S_IFDIR: - ks = 'd' + ks = "d" elif kind == stat.S_IFBLK: - ks = 'b' + ks = "b" elif kind == stat.S_IFREG: - ks = '-' + ks = "-" elif kind == stat.S_IFLNK: - ks = 'l' + ks = "l" elif kind == stat.S_IFSOCK: - ks = 's' + ks = "s" else: - ks = '?' - ks += self._rwx((self.st_mode & o700) >> 6, self.st_mode & stat.S_ISUID) - ks += self._rwx((self.st_mode & o70) >> 3, self.st_mode & stat.S_ISGID) - ks += self._rwx(self.st_mode & 7, self.st_mode & stat.S_ISVTX, True) + ks = "?" 
+ ks += self._rwx( + (self.st_mode & o700) >> 6, self.st_mode & stat.S_ISUID + ) + ks += self._rwx( + (self.st_mode & o70) >> 3, self.st_mode & stat.S_ISGID + ) + ks += self._rwx( + self.st_mode & 7, self.st_mode & stat.S_ISVTX, True + ) else: - ks = '?---------' + ks = "?---------" # compute display date if (self.st_mtime is None) or (self.st_mtime == xffffffff): # shouldn't really happen - datestr = '(unknown date)' + datestr = "(unknown date)" else: + time_tuple = time.localtime(self.st_mtime) if abs(time.time() - self.st_mtime) > 15552000: # (15552000 = 6 months) - datestr = time.strftime('%d %b %Y', time.localtime(self.st_mtime)) + datestr = strftime("%d %b %Y", time_tuple) else: - datestr = time.strftime('%d %b %H:%M', time.localtime(self.st_mtime)) - filename = getattr(self, 'filename', '?') + datestr = strftime("%d %b %H:%M", time_tuple) + filename = getattr(self, "filename", "?") # not all servers support uid/gid uid = self.st_uid @@ -217,7 +224,23 @@ def __str__(self): if size is None: size = 0 - return '%s 1 %-8d %-8d %8d %-12s %s' % (ks, uid, gid, size, datestr, filename) + # TODO: not sure this actually worked as expected beforehand, leaving + # it untouched for the time being, re: .format() upgrade, until someone + # has time to doublecheck + return "%s 1 %-8d %-8d %8d %-12s %s" % ( + ks, + uid, + gid, + size, + datestr, + filename, + ) def asbytes(self): - return b(str(self)) + return self._as_text().encode("utf-8") + + if PY2: + __unicode__ = _as_text + __str__ = asbytes + else: + __str__ = _as_text diff --git a/paramiko/sftp_client.py b/paramiko/sftp_client.py index 0df94389a..d6c3a70d9 100644 --- a/paramiko/sftp_client.py +++ b/paramiko/sftp_client.py @@ -28,13 +28,40 @@ from paramiko.channel import Channel from paramiko.message import Message from paramiko.common import INFO, DEBUG, o777 -from paramiko.py3compat import bytestring, b, u, long, string_types, bytes_types -from paramiko.sftp import BaseSFTP, CMD_OPENDIR, CMD_HANDLE, SFTPError, CMD_READDIR, \ - CMD_NAME, CMD_CLOSE, SFTP_FLAG_READ, SFTP_FLAG_WRITE, SFTP_FLAG_CREATE, \ - SFTP_FLAG_TRUNC, SFTP_FLAG_APPEND, SFTP_FLAG_EXCL, CMD_OPEN, CMD_REMOVE, \ - CMD_RENAME, CMD_MKDIR, CMD_RMDIR, CMD_STAT, CMD_ATTRS, CMD_LSTAT, \ - CMD_SYMLINK, CMD_SETSTAT, CMD_READLINK, CMD_REALPATH, CMD_STATUS, SFTP_OK, \ - SFTP_EOF, SFTP_NO_SUCH_FILE, SFTP_PERMISSION_DENIED +from paramiko.py3compat import b, u, long +from paramiko.sftp import ( + BaseSFTP, + CMD_OPENDIR, + CMD_HANDLE, + SFTPError, + CMD_READDIR, + CMD_NAME, + CMD_CLOSE, + SFTP_FLAG_READ, + SFTP_FLAG_WRITE, + SFTP_FLAG_CREATE, + SFTP_FLAG_TRUNC, + SFTP_FLAG_APPEND, + SFTP_FLAG_EXCL, + CMD_OPEN, + CMD_REMOVE, + CMD_RENAME, + CMD_MKDIR, + CMD_RMDIR, + CMD_STAT, + CMD_ATTRS, + CMD_LSTAT, + CMD_SYMLINK, + CMD_SETSTAT, + CMD_READLINK, + CMD_REALPATH, + CMD_STATUS, + CMD_EXTENDED, + SFTP_OK, + SFTP_EOF, + SFTP_NO_SUCH_FILE, + SFTP_PERMISSION_DENIED, +) from paramiko.sftp_attr import SFTPAttributes from paramiko.ssh_exception import SSHException @@ -49,14 +76,15 @@ def _to_unicode(s): probably doesn't know the filename's encoding. """ try: - return s.encode('ascii') + return s.encode("ascii") except (UnicodeError, AttributeError): try: - return s.decode('utf-8') + return s.decode("utf-8") except UnicodeError: return s -b_slash = b'/' + +b_slash = b"/" class SFTPClient(BaseSFTP, ClosingContextManager): @@ -68,6 +96,7 @@ class SFTPClient(BaseSFTP, ClosingContextManager): Instances of this class may be used as context managers. 
""" + def __init__(self, sock): """ Create an SFTP client from an existing `.Channel`. The channel @@ -78,8 +107,8 @@ def __init__(self, sock): :param .Channel sock: an open `.Channel` using the ``"sftp"`` subsystem - :raises SSHException: if there's an exception while negotiating - sftp + :raises: + `.SSHException` -- if there's an exception while negotiating sftp """ BaseSFTP.__init__(self) self.sock = sock @@ -93,13 +122,20 @@ def __init__(self, sock): if type(sock) is Channel: # override default logger transport = self.sock.get_transport() - self.logger = util.get_logger(transport.get_log_channel() + '.sftp') + self.logger = util.get_logger( + transport.get_log_channel() + ".sftp" + ) self.ultra_debug = transport.get_hexdump() try: server_version = self._send_version() except EOFError: - raise SSHException('EOF during negotiation') - self._log(INFO, 'Opened sftp connection (server version %d)' % server_version) + raise SSHException("EOF during negotiation") + self._log( + INFO, + "Opened sftp connection (server version {})".format( + server_version + ), + ) @classmethod def from_transport(cls, t, window_size=None, max_packet_size=None): @@ -111,7 +147,8 @@ def from_transport(cls, t, window_size=None, max_packet_size=None): OpenSSH and should work adequately for both files transfers and interactive sessions. - :param .Transport t: an open `.Transport` which is already authenticated + :param .Transport t: an open `.Transport` which is already + authenticated :param int window_size: optional window size for the `.SFTPClient` session. :param int max_packet_size: @@ -124,11 +161,12 @@ def from_transport(cls, t, window_size=None, max_packet_size=None): .. versionchanged:: 1.15 Added the ``window_size`` and ``max_packet_size`` arguments. """ - chan = t.open_session(window_size=window_size, - max_packet_size=max_packet_size) + chan = t.open_session( + window_size=window_size, max_packet_size=max_packet_size + ) if chan is None: return None - chan.invoke_subsystem('sftp') + chan.invoke_subsystem("sftp") return cls(chan) def _log(self, level, msg, *args): @@ -136,9 +174,16 @@ def _log(self, level, msg, *args): for m in msg: self._log(level, m, *args) else: - # escape '%' in msg (they could come from file or directory names) before logging - msg = msg.replace('%','%%') - super(SFTPClient, self)._log(level, "[chan %s] " + msg, *([self.sock.get_name()] + list(args))) + # NOTE: these bits MUST continue using %-style format junk because + # logging.Logger.log() explicitly requires it. Grump. + # escape '%' in msg (they could come from file or directory names) + # before logging + msg = msg.replace("%", "%%") + super(SFTPClient, self)._log( + level, + "[chan %s] " + msg, + *([self.sock.get_name()] + list(args)) + ) def close(self): """ @@ -146,7 +191,7 @@ def close(self): .. versionadded:: 1.4 """ - self._log(INFO, 'sftp session closed.') + self._log(INFO, "sftp session closed.") self.sock.close() def get_channel(self): @@ -158,9 +203,10 @@ def get_channel(self): """ return self.sock - def listdir(self, path='.'): + def listdir(self, path="."): """ - Return a list containing the names of the entries in the given ``path``. + Return a list containing the names of the entries in the given + ``path``. The list is in arbitrary order. It does not include the special entries ``'.'`` and ``'..'`` even if they are present in the folder. 
@@ -171,7 +217,7 @@ def listdir(self, path='.'): """ return [f.filename for f in self.listdir_attr(path)] - def listdir_attr(self, path='.'): + def listdir_attr(self, path="."): """ Return a list containing `.SFTPAttributes` objects corresponding to files in the given ``path``. The list is in arbitrary order. It does @@ -189,10 +235,10 @@ def listdir_attr(self, path='.'): .. versionadded:: 1.2 """ path = self._adjust_cwd(path) - self._log(DEBUG, 'listdir(%r)' % path) + self._log(DEBUG, "listdir({!r})".format(path)) t, msg = self._request(CMD_OPENDIR, path) if t != CMD_HANDLE: - raise SFTPError('Expected handle') + raise SFTPError("Expected handle") handle = msg.get_binary() filelist = [] while True: @@ -202,18 +248,18 @@ def listdir_attr(self, path='.'): # done with handle break if t != CMD_NAME: - raise SFTPError('Expected name response') + raise SFTPError("Expected name response") count = msg.get_int() for i in range(count): filename = msg.get_text() longname = msg.get_text() attr = SFTPAttributes._from_msg(msg, filename, longname) - if (filename != '.') and (filename != '..'): + if (filename != ".") and (filename != ".."): filelist.append(attr) self._request(CMD_CLOSE, handle) return filelist - def listdir_iter(self, path='.', read_aheads=50): + def listdir_iter(self, path=".", read_aheads=50): """ Generator version of `.listdir_attr`. @@ -223,16 +269,16 @@ def listdir_iter(self, path='.', read_aheads=50): ``read_aheads``, an integer controlling how many ``SSH_FXP_READDIR`` requests are made to the server. The default of 50 should suffice for most file listings as each request/response cycle - may contain multiple files (dependant on server implementation.) + may contain multiple files (dependent on server implementation.) .. versionadded:: 1.15 """ path = self._adjust_cwd(path) - self._log(DEBUG, 'listdir(%r)' % path) + self._log(DEBUG, "listdir({!r})".format(path)) t, msg = self._request(CMD_OPENDIR, path) if t != CMD_HANDLE: - raise SFTPError('Expected handle') + raise SFTPError("Expected handle") handle = msg.get_string() @@ -247,7 +293,6 @@ def listdir_iter(self, path='.', read_aheads=50): num = self._async_request(type(None), CMD_READDIR, handle) nums.append(num) - # For each of our sent requests # Read and parse the corresponding packets # If we're at the end of our queued requests, then fire off @@ -266,8 +311,9 @@ def listdir_iter(self, path='.', read_aheads=50): filename = msg.get_text() longname = msg.get_text() attr = SFTPAttributes._from_msg( - msg, filename, longname) - if (filename != '.') and (filename != '..'): + msg, filename, longname + ) + if (filename != ".") and (filename != ".."): yield attr # If we've hit the end of our queued requests, reset nums. @@ -277,8 +323,7 @@ def listdir_iter(self, path='.', read_aheads=50): self._request(CMD_CLOSE, handle) return - - def open(self, filename, mode='r', bufsize=-1): + def open(self, filename, mode="r", bufsize=-1): """ Open a file on the remote server. The arguments are the same as for Python's built-in `python:file` (aka `python:open`). A file-like @@ -299,36 +344,41 @@ def open(self, filename, mode='r', bufsize=-1): ``O_EXCL`` flag in posix. The file will be buffered in standard Python style by default, but - can be altered with the ``bufsize`` parameter. ``0`` turns off + can be altered with the ``bufsize`` parameter. ``<=0`` turns off buffering, ``1`` uses line buffering, and any number greater than 1 (``>1``) uses that specific buffer size. 
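A sketch contrasting the two listing styles above: ``listdir_attr`` materializes the whole directory, while ``listdir_iter`` yields entries as the read-ahead READDIR responses arrive, which suits very large directories. The "ten newest" policy is arbitrary.

    def newest_entries(sftp, path=".", count=10):
        entries = sftp.listdir_iter(path)
        return sorted(entries, key=lambda attr: attr.st_mtime or 0, reverse=True)[:count]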
:param str filename: name of the file to open :param str mode: mode (Python-style) to open in - :param int bufsize: desired buffering (-1 = default buffer size) + :param int bufsize: desired buffering (default: ``-1``) :return: an `.SFTPFile` object representing the open file - :raises IOError: if the file could not be opened. + :raises: ``IOError`` -- if the file could not be opened. """ filename = self._adjust_cwd(filename) - self._log(DEBUG, 'open(%r, %r)' % (filename, mode)) + self._log(DEBUG, "open({!r}, {!r})".format(filename, mode)) imode = 0 - if ('r' in mode) or ('+' in mode): + if ("r" in mode) or ("+" in mode): imode |= SFTP_FLAG_READ - if ('w' in mode) or ('+' in mode) or ('a' in mode): + if ("w" in mode) or ("+" in mode) or ("a" in mode): imode |= SFTP_FLAG_WRITE - if 'w' in mode: + if "w" in mode: imode |= SFTP_FLAG_CREATE | SFTP_FLAG_TRUNC - if 'a' in mode: + if "a" in mode: imode |= SFTP_FLAG_CREATE | SFTP_FLAG_APPEND - if 'x' in mode: + if "x" in mode: imode |= SFTP_FLAG_CREATE | SFTP_FLAG_EXCL attrblock = SFTPAttributes() t, msg = self._request(CMD_OPEN, filename, imode, attrblock) if t != CMD_HANDLE: - raise SFTPError('Expected handle') + raise SFTPError("Expected handle") handle = msg.get_binary() - self._log(DEBUG, 'open(%r, %r) -> %s' % (filename, mode, hexlify(handle))) + self._log( + DEBUG, + "open({!r}, {!r}) -> {}".format( + filename, mode, u(hexlify(handle)) + ), + ) return SFTPFile(self, handle, mode, bufsize) # Python continues to vacillate about "open" vs "file"... @@ -341,10 +391,10 @@ def remove(self, path): :param str path: path (absolute or relative) of the file to remove - :raises IOError: if the path refers to a folder (directory) + :raises: ``IOError`` -- if the path refers to a folder (directory) """ path = self._adjust_cwd(path) - self._log(DEBUG, 'remove(%r)' % path) + self._log(DEBUG, "remove({!r})".format(path)) self._request(CMD_REMOVE, path) unlink = remove @@ -353,17 +403,47 @@ def rename(self, oldpath, newpath): """ Rename a file or folder from ``oldpath`` to ``newpath``. - :param str oldpath: existing name of the file or folder - :param str newpath: new name for the file or folder + .. note:: + This method implements 'standard' SFTP ``RENAME`` behavior; those + seeking the OpenSSH "POSIX rename" extension behavior should use + `posix_rename`. + + :param str oldpath: + existing name of the file or folder + :param str newpath: + new name for the file or folder, must not exist already - :raises IOError: if ``newpath`` is a folder, or something else goes + :raises: + ``IOError`` -- if ``newpath`` is a folder, or something else goes wrong """ oldpath = self._adjust_cwd(oldpath) newpath = self._adjust_cwd(newpath) - self._log(DEBUG, 'rename(%r, %r)' % (oldpath, newpath)) + self._log(DEBUG, "rename({!r}, {!r})".format(oldpath, newpath)) self._request(CMD_RENAME, oldpath, newpath) + def posix_rename(self, oldpath, newpath): + """ + Rename a file or folder from ``oldpath`` to ``newpath``, following + posix conventions. 
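Usage sketch for the new ``posix_rename`` (paths and the rotation policy are placeholders): plain ``rename`` fails if the target already exists, while ``posix_rename`` overwrites it via the ``posix-rename@openssh.com`` extension.

    def rotate_report(sftp):
        sftp.rename("report.txt", "report-old.txt")        # target must not already exist
        sftp.posix_rename("report-new.txt", "report.txt")  # target may be overwritten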
+ + :param str oldpath: existing name of the file or folder + :param str newpath: new name for the file or folder, will be + overwritten if it already exists + + :raises: + ``IOError`` -- if ``newpath`` is a folder, posix-rename is not + supported by the server or something else goes wrong + + :versionadded: 2.2 + """ + oldpath = self._adjust_cwd(oldpath) + newpath = self._adjust_cwd(newpath) + self._log(DEBUG, "posix_rename({!r}, {!r})".format(oldpath, newpath)) + self._request( + CMD_EXTENDED, "posix-rename@openssh.com", oldpath, newpath + ) + def mkdir(self, path, mode=o777): """ Create a folder (directory) named ``path`` with numeric mode ``mode``. @@ -374,7 +454,7 @@ def mkdir(self, path, mode=o777): :param int mode: permissions (posix-style) for the newly-created folder """ path = self._adjust_cwd(path) - self._log(DEBUG, 'mkdir(%r, %r)' % (path, mode)) + self._log(DEBUG, "mkdir({!r}, {!r})".format(path, mode)) attr = SFTPAttributes() attr.st_mode = mode self._request(CMD_MKDIR, path, attr) @@ -386,7 +466,7 @@ def rmdir(self, path): :param str path: name of the folder to remove """ path = self._adjust_cwd(path) - self._log(DEBUG, 'rmdir(%r)' % path) + self._log(DEBUG, "rmdir({!r})".format(path)) self._request(CMD_RMDIR, path) def stat(self, path): @@ -409,10 +489,10 @@ def stat(self, path): file """ path = self._adjust_cwd(path) - self._log(DEBUG, 'stat(%r)' % path) + self._log(DEBUG, "stat({!r})".format(path)) t, msg = self._request(CMD_STAT, path) if t != CMD_ATTRS: - raise SFTPError('Expected attributes') + raise SFTPError("Expected attributes") return SFTPAttributes._from_msg(msg) def lstat(self, path): @@ -427,23 +507,22 @@ def lstat(self, path): file """ path = self._adjust_cwd(path) - self._log(DEBUG, 'lstat(%r)' % path) + self._log(DEBUG, "lstat({!r})".format(path)) t, msg = self._request(CMD_LSTAT, path) if t != CMD_ATTRS: - raise SFTPError('Expected attributes') + raise SFTPError("Expected attributes") return SFTPAttributes._from_msg(msg) def symlink(self, source, dest): """ - Create a symbolic link (shortcut) of the ``source`` path at - ``destination``. + Create a symbolic link to the ``source`` path at ``destination``. :param str source: path of the original file :param str dest: path of the newly created symlink """ dest = self._adjust_cwd(dest) - self._log(DEBUG, 'symlink(%r, %r)' % (source, dest)) - source = bytestring(source) + self._log(DEBUG, "symlink({!r}, {!r})".format(source, dest)) + source = b(source) self._request(CMD_SYMLINK, source, dest) def chmod(self, path, mode): @@ -456,7 +535,7 @@ def chmod(self, path, mode): :param int mode: new permissions """ path = self._adjust_cwd(path) - self._log(DEBUG, 'chmod(%r, %r)' % (path, mode)) + self._log(DEBUG, "chmod({!r}, {!r})".format(path, mode)) attr = SFTPAttributes() attr.st_mode = mode self._request(CMD_SETSTAT, path, attr) @@ -473,19 +552,19 @@ def chown(self, path, uid, gid): :param int gid: new group id """ path = self._adjust_cwd(path) - self._log(DEBUG, 'chown(%r, %r, %r)' % (path, uid, gid)) + self._log(DEBUG, "chown({!r}, {!r}, {!r})".format(path, uid, gid)) attr = SFTPAttributes() attr.st_uid, attr.st_gid = uid, gid self._request(CMD_SETSTAT, path, attr) def utime(self, path, times): """ - Set the access and modified times of the file specified by ``path``. If - ``times`` is ``None``, then the file's access and modified times are set - to the current time. 
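A small sketch combining the attribute setters documented above; the path and mode are placeholders.

    import stat
    import time

    def touch_and_lock_down(sftp, path):
        sftp.chmod(path, 0o600)                       # posix-style permissions
        sftp.utime(path, (time.time(), time.time()))  # (atime, mtime)
        attrs = sftp.stat(path)
        return stat.S_IMODE(attrs.st_mode), attrs.st_mtime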
Otherwise, ``times`` must be a 2-tuple of numbers, - of the form ``(atime, mtime)``, which is used to set the access and - modified times, respectively. This bizarre API is mimicked from Python - for the sake of consistency -- I apologize. + Set the access and modified times of the file specified by ``path``. + If ``times`` is ``None``, then the file's access and modified times + are set to the current time. Otherwise, ``times`` must be a 2-tuple + of numbers, of the form ``(atime, mtime)``, which is used to set the + access and modified times, respectively. This bizarre API is mimicked + from Python for the sake of consistency -- I apologize. :param str path: path of the file to modify :param tuple times: @@ -495,7 +574,7 @@ def utime(self, path, times): path = self._adjust_cwd(path) if times is None: times = (time.time(), time.time()) - self._log(DEBUG, 'utime(%r, %r)' % (path, times)) + self._log(DEBUG, "utime({!r}, {!r})".format(path, times)) attr = SFTPAttributes() attr.st_atime, attr.st_mtime = times self._request(CMD_SETSTAT, path, attr) @@ -507,11 +586,10 @@ def truncate(self, path, size): method on Python file objects. :param str path: path of the file to modify - :param size: the new size of the file - :type size: int or long + :param int size: the new size of the file """ path = self._adjust_cwd(path) - self._log(DEBUG, 'truncate(%r, %r)' % (path, size)) + self._log(DEBUG, "truncate({!r}, {!r})".format(path, size)) attr = SFTPAttributes() attr.st_size = size self._request(CMD_SETSTAT, path, attr) @@ -526,15 +604,15 @@ def readlink(self, path): :return: target path, as a `str` """ path = self._adjust_cwd(path) - self._log(DEBUG, 'readlink(%r)' % path) + self._log(DEBUG, "readlink({!r})".format(path)) t, msg = self._request(CMD_READLINK, path) if t != CMD_NAME: - raise SFTPError('Expected name response') + raise SFTPError("Expected name response") count = msg.get_int() if count == 0: return None if count != 1: - raise SFTPError('Readlink returned %d results' % count) + raise SFTPError("Readlink returned {} results".format(count)) return _to_unicode(msg.get_string()) def normalize(self, path): @@ -547,16 +625,16 @@ def normalize(self, path): :param str path: path to be normalized :return: normalized form of the given path (as a `str`) - :raises IOError: if the path can't be resolved on the server + :raises: ``IOError`` -- if the path can't be resolved on the server """ path = self._adjust_cwd(path) - self._log(DEBUG, 'normalize(%r)' % path) + self._log(DEBUG, "normalize({!r})".format(path)) t, msg = self._request(CMD_REALPATH, path) if t != CMD_NAME: - raise SFTPError('Expected name response') + raise SFTPError("Expected name response") count = msg.get_int() if count != 1: - raise SFTPError('Realpath returned %d results' % count) + raise SFTPError("Realpath returned {} results".format(count)) return msg.get_text() def chdir(self, path=None): @@ -570,7 +648,8 @@ def chdir(self, path=None): :param str path: new current working directory - :raises IOError: if the requested path doesn't exist on the server + :raises: + ``IOError`` -- if the requested path doesn't exist on the server .. 
versionadded:: 1.4 """ @@ -578,7 +657,8 @@ def chdir(self, path=None): self._cwd = None return if not stat.S_ISDIR(self.stat(path).st_mode): - raise SFTPError(errno.ENOTDIR, "%s: %s" % (os.strerror(errno.ENOTDIR), path)) + code = errno.ENOTDIR + raise SFTPError(code, "{}: {}".format(os.strerror(code), path)) self._cwd = b(self.normalize(path)) def getcwd(self): @@ -631,7 +711,7 @@ def putfo(self, fl, remotepath, file_size=0, callback=None, confirm=True): .. versionadded:: 1.10 """ - with self.file(remotepath, 'wb') as fr: + with self.file(remotepath, "wb") as fr: fr.set_pipelined(True) size = self._transfer_with_callback( reader=fl, writer=fr, file_size=file_size, callback=callback @@ -639,7 +719,9 @@ def putfo(self, fl, remotepath, file_size=0, callback=None, confirm=True): if confirm: s = self.stat(remotepath) if s.st_size != size: - raise IOError('size mismatch in put! %d != %d' % (s.st_size, size)) + raise IOError( + "size mismatch in put! {} != {}".format(s.st_size, size) + ) else: s = SFTPAttributes() return s @@ -663,7 +745,8 @@ def put(self, localpath, remotepath, callback=None, confirm=True): whether to do a stat() on the file afterwards to confirm the file size - :return: an `.SFTPAttributes` object containing attributes about the given file + :return: an `.SFTPAttributes` object containing attributes about the + given file .. versionadded:: 1.4 .. versionchanged:: 1.7.4 @@ -672,10 +755,10 @@ def put(self, localpath, remotepath, callback=None, confirm=True): ``confirm`` param added. """ file_size = os.stat(localpath).st_size - with open(localpath, 'rb') as fl: + with open(localpath, "rb") as fl: return self.putfo(fl, remotepath, file_size, callback, confirm) - def getfo(self, remotepath, fl, callback=None): + def getfo(self, remotepath, fl, callback=None, prefetch=True): """ Copy a remote file (``remotepath``) from the SFTP server and write to an open file or file-like object, ``fl``. Any exception raised by @@ -688,20 +771,23 @@ def getfo(self, remotepath, fl, callback=None): :param callable callback: optional callback function (form: ``func(int, int)``) that accepts the bytes transferred so far and the total bytes to be transferred + :param bool prefetch: + controls whether prefetching is performed (default: True) :return: the `number ` of bytes written to the opened file object .. versionadded:: 1.10 + .. versionchanged:: 2.8 + Added the ``prefetch`` keyword argument. """ file_size = self.stat(remotepath).st_size - with self.open(remotepath, 'rb') as fr: - fr.prefetch(file_size) + with self.open(remotepath, "rb") as fr: + if prefetch: + fr.prefetch(file_size) return self._transfer_with_callback( reader=fr, writer=fl, file_size=file_size, callback=callback ) - return size - - def get(self, remotepath, localpath, callback=None): + def get(self, remotepath, localpath, callback=None, prefetch=True): """ Copy a remote file (``remotepath``) from the SFTP server to the local host as ``localpath``. Any exception raised by operations will be @@ -712,18 +798,24 @@ def get(self, remotepath, localpath, callback=None): :param callable callback: optional callback function (form: ``func(int, int)``) that accepts the bytes transferred so far and the total bytes to be transferred + :param bool prefetch: + controls whether prefetching is performed (default: True) .. versionadded:: 1.4 .. versionchanged:: 1.7.4 Added the ``callback`` param + .. versionchanged:: 2.8 + Added the ``prefetch`` keyword argument. 
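
# Illustrative usage sketch (not part of the patch above): the ``prefetch``
# keyword added to get()/getfo() in this hunk.  Host, credentials and paths
# are placeholders.  Disabling prefetch gives up read-ahead parallelism in
# exchange for strictly sequential requests, which some SFTP servers handle
# more reliably.
import io

import paramiko

client = paramiko.SSHClient()
client.load_system_host_keys()
client.connect("sftp.example.com", username="demo")  # placeholder host/creds
sftp = client.open_sftp()

# Default behaviour: prefetch the whole remote file for throughput.
sftp.get("/srv/data/big.bin", "big.bin")

# Conservative behaviour: plain sequential reads, no read-ahead.
sftp.get("/srv/data/big.bin", "big-sequential.bin", prefetch=False)

# getfo() accepts the same flag when writing into an arbitrary file object.
buf = io.BytesIO()
nbytes = sftp.getfo("/srv/data/small.txt", buf, prefetch=False)
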
""" - with open(localpath, 'wb') as fl: - size = self.getfo(remotepath, fl, callback) + with open(localpath, "wb") as fl: + size = self.getfo(remotepath, fl, callback, prefetch) s = os.stat(localpath) if s.st_size != size: - raise IOError('size mismatch in get! %d != %d' % (s.st_size, size)) + raise IOError( + "size mismatch in get! {} != {}".format(s.st_size, size) + ) - ### internals... + # ...internals... def _request(self, t, *arg): num = self._async_request(type(None), t, *arg) @@ -740,12 +832,12 @@ def _async_request(self, fileobj, t, *arg): msg.add_int64(item) elif isinstance(item, int): msg.add_int(item) - elif isinstance(item, (string_types, bytes_types)): - msg.add_string(item) elif isinstance(item, SFTPAttributes): item._pack(msg) else: - raise Exception('unknown type for %r type %r' % (item, type(item))) + # For all other types, rely on as_string() to either coerce + # to bytes before writing or raise a suitable exception. + msg.add_string(item) num = self.request_number self._expecting[num] = fileobj self.request_number += 1 @@ -759,14 +851,15 @@ def _read_response(self, waitfor=None): try: t, data = self._read_packet() except EOFError as e: - raise SSHException('Server connection dropped: %s' % str(e)) + raise SSHException("Server connection dropped: {}".format(e)) msg = Message(data) num = msg.get_int() self._lock.acquire() try: if num not in self._expecting: - # might be response for a file that was closed before responses came back - self._log(DEBUG, 'Unexpected response #%d' % (num,)) + # might be response for a file that was closed before + # responses came back + self._log(DEBUG, "Unexpected response #{}".format(num)) if waitfor is None: # just doing a single check break @@ -780,7 +873,10 @@ def _read_response(self, waitfor=None): if t == CMD_STATUS: self._convert_status(msg) return t, msg - if fileobj is not type(None): + + # can not rewrite this to deal with E721, either as a None check + # nor as not an instance of None or NoneType + if fileobj is not type(None): # noqa fileobj._async_response(t, msg, num) if waitfor is None: # just doing a single check @@ -828,6 +924,7 @@ def _adjust_cwd(self, path): class SFTP(SFTPClient): """ - An alias for `.SFTPClient` for backwards compatability. + An alias for `.SFTPClient` for backwards compatibility. """ + pass diff --git a/paramiko/sftp_file.py b/paramiko/sftp_file.py index fdf667cdb..0104d8577 100644 --- a/paramiko/sftp_file.py +++ b/paramiko/sftp_file.py @@ -30,13 +30,23 @@ from paramiko.common import DEBUG from paramiko.file import BufferedFile -from paramiko.py3compat import long -from paramiko.sftp import CMD_CLOSE, CMD_READ, CMD_DATA, SFTPError, CMD_WRITE, \ - CMD_STATUS, CMD_FSTAT, CMD_ATTRS, CMD_FSETSTAT, CMD_EXTENDED +from paramiko.py3compat import u, long +from paramiko.sftp import ( + CMD_CLOSE, + CMD_READ, + CMD_DATA, + SFTPError, + CMD_WRITE, + CMD_STATUS, + CMD_FSTAT, + CMD_ATTRS, + CMD_FSETSTAT, + CMD_EXTENDED, +) from paramiko.sftp_attr import SFTPAttributes -class SFTPFile (BufferedFile): +class SFTPFile(BufferedFile): """ Proxy object for a file on the remote server, in client mode SFTP. @@ -48,7 +58,7 @@ class SFTPFile (BufferedFile): # this size. 
MAX_REQUEST_SIZE = 32768 - def __init__(self, sftp, handle, mode='r', bufsize=-1): + def __init__(self, sftp, handle, mode="r", bufsize=-1): BufferedFile.__init__(self) self.sftp = sftp self.handle = handle @@ -63,15 +73,15 @@ def __init__(self, sftp, handle, mode='r', bufsize=-1): self._reqs = deque() def __del__(self): - self._close(async=True) + self._close(async_=True) def close(self): """ Close the file. """ - self._close(async=False) + self._close(async_=False) - def _close(self, async=False): + def _close(self, async_=False): # We allow double-close without signaling an error, because real # Python file objects do. However, we must protect against actually # sending multiple CMD_CLOSE packets, because after we close our @@ -81,13 +91,14 @@ def _close(self, async=False): # __del__.) if self._closed: return - self.sftp._log(DEBUG, 'close(%s)' % hexlify(self.handle)) + self.sftp._log(DEBUG, "close({})".format(u(hexlify(self.handle)))) if self.pipelined: self.sftp._finish_responses(self) BufferedFile.close(self) try: - if async: - # GC'd file handle could be called from an arbitrary thread -- don't wait for a response + if async_: + # GC'd file handle could be called from an arbitrary thread + # -- don't wait for a response self.sftp._async_request(type(None), CMD_CLOSE, self.handle) else: self.sftp._request(CMD_CLOSE, self.handle) @@ -99,7 +110,9 @@ def _close(self, async=False): pass def _data_in_prefetch_requests(self, offset, size): - k = [x for x in list(self._prefetch_extents.values()) if x[0] <= offset] + k = [ + x for x in list(self._prefetch_extents.values()) if x[0] <= offset + ] if len(k) == 0: return False k.sort(key=lambda x: x[0]) @@ -110,8 +123,11 @@ def _data_in_prefetch_requests(self, offset, size): if buf_offset + buf_size >= offset + size: # inclusive return True - # well, we have part of the request. see if another chunk has the rest. - return self._data_in_prefetch_requests(buf_offset + buf_size, offset + size - buf_offset - buf_size) + # well, we have part of the request. see if another chunk has + # the rest. + return self._data_in_prefetch_requests( + buf_offset + buf_size, offset + size - buf_offset - buf_size + ) def _data_in_prefetch_buffers(self, offset): """ @@ -135,7 +151,8 @@ def _read_prefetch(self, size): read data out of the prefetch buffer, if possible. if the data isn't in the buffer, return None. otherwise, behaves like a normal read. """ - # while not closed, and haven't fetched past the current position, and haven't reached EOF... + # while not closed, and haven't fetched past the current position, + # and haven't reached EOF... 
while True: offset = self._data_in_prefetch_buffers(self._realpos) if offset is not None: @@ -165,21 +182,32 @@ def _read(self, size): data = self._read_prefetch(size) if data is not None: return data - t, msg = self.sftp._request(CMD_READ, self.handle, long(self._realpos), int(size)) + t, msg = self.sftp._request( + CMD_READ, self.handle, long(self._realpos), int(size) + ) if t != CMD_DATA: - raise SFTPError('Expected data') + raise SFTPError("Expected data") return msg.get_string() def _write(self, data): # may write less than requested if it would exceed max packet size chunk = min(len(data), self.MAX_REQUEST_SIZE) - self._reqs.append(self.sftp._async_request(type(None), CMD_WRITE, self.handle, long(self._realpos), data[:chunk])) - if not self.pipelined or (len(self._reqs) > 100 and self.sftp.sock.recv_ready()): + sftp_async_request = self.sftp._async_request( + type(None), + CMD_WRITE, + self.handle, + long(self._realpos), + data[:chunk], + ) + self._reqs.append(sftp_async_request) + if not self.pipelined or ( + len(self._reqs) > 100 and self.sftp.sock.recv_ready() + ): while len(self._reqs): req = self._reqs.popleft() t, msg = self.sftp._read_response(req) if t != CMD_STATUS: - raise SFTPError('Expected status') + raise SFTPError("Expected status") # convert_status already called return chunk @@ -228,6 +256,11 @@ def seekable(self): return True def seek(self, offset, whence=0): + """ + Set the file's current position. + + See `file.seek` for details. + """ self.flush() if whence == self.SEEK_SET: self._realpos = self._pos = offset @@ -244,11 +277,12 @@ def stat(self): exactly like `.SFTPClient.stat`, except that it operates on an already-open file. - :return: an `.SFTPAttributes` object containing attributes about this file. + :returns: + an `.SFTPAttributes` object containing attributes about this file. """ t, msg = self.sftp._request(CMD_FSTAT, self.handle) if t != CMD_ATTRS: - raise SFTPError('Expected attributes') + raise SFTPError("Expected attributes") return SFTPAttributes._from_msg(msg) def chmod(self, mode): @@ -259,7 +293,9 @@ def chmod(self, mode): :param int mode: new permissions """ - self.sftp._log(DEBUG, 'chmod(%s, %r)' % (hexlify(self.handle), mode)) + self.sftp._log( + DEBUG, "chmod({}, {!r})".format(hexlify(self.handle), mode) + ) attr = SFTPAttributes() attr.st_mode = mode self.sftp._request(CMD_FSETSTAT, self.handle, attr) @@ -274,7 +310,10 @@ def chown(self, uid, gid): :param int uid: new owner's uid :param int gid: new group id """ - self.sftp._log(DEBUG, 'chown(%s, %r, %r)' % (hexlify(self.handle), uid, gid)) + self.sftp._log( + DEBUG, + "chown({}, {!r}, {!r})".format(hexlify(self.handle), uid, gid), + ) attr = SFTPAttributes() attr.st_uid, attr.st_gid = uid, gid self.sftp._request(CMD_FSETSTAT, self.handle, attr) @@ -282,11 +321,11 @@ def chown(self, uid, gid): def utime(self, times): """ Set the access and modified times of this file. If - ``times`` is ``None``, then the file's access and modified times are set - to the current time. Otherwise, ``times`` must be a 2-tuple of numbers, - of the form ``(atime, mtime)``, which is used to set the access and - modified times, respectively. This bizarre API is mimicked from Python - for the sake of consistency -- I apologize. + ``times`` is ``None``, then the file's access and modified times are + set to the current time. Otherwise, ``times`` must be a 2-tuple of + numbers, of the form ``(atime, mtime)``, which is used to set the + access and modified times, respectively. 
This bizarre API is mimicked + from Python for the sake of consistency -- I apologize. :param tuple times: ``None`` or a tuple of (access time, modified time) in standard @@ -294,7 +333,9 @@ def utime(self, times): """ if times is None: times = (time.time(), time.time()) - self.sftp._log(DEBUG, 'utime(%s, %r)' % (hexlify(self.handle), times)) + self.sftp._log( + DEBUG, "utime({}, {!r})".format(hexlify(self.handle), times) + ) attr = SFTPAttributes() attr.st_atime, attr.st_mtime = times self.sftp._request(CMD_FSETSTAT, self.handle, attr) @@ -306,9 +347,10 @@ def truncate(self, size): Python file objects. :param size: the new size of the file - :type size: int or long """ - self.sftp._log(DEBUG, 'truncate(%s, %r)' % (hexlify(self.handle), size)) + self.sftp._log( + DEBUG, "truncate({}, {!r})".format(hexlify(self.handle), size) + ) attr = SFTPAttributes() attr.st_size = size self.sftp._request(CMD_FSETSTAT, self.handle, attr) @@ -319,9 +361,9 @@ def check(self, hash_algorithm, offset=0, length=0, block_size=0): to verify a successful upload or download, or for various rsync-like operations. - The file is hashed from ``offset``, for ``length`` bytes. If ``length`` - is 0, the remainder of the file is hashed. Thus, if both ``offset`` - and ``length`` are zero, the entire file is hashed. + The file is hashed from ``offset``, for ``length`` bytes. + If ``length`` is 0, the remainder of the file is hashed. Thus, if both + ``offset`` and ``length`` are zero, the entire file is hashed. Normally, ``block_size`` will be 0 (the default), and this method will return a byte string representing the requested hash (for example, a @@ -342,30 +384,34 @@ def check(self, hash_algorithm, offset=0, length=0, block_size=0): :param offset: offset into the file to begin hashing (0 means to start from the beginning) - :type offset: int or long :param length: number of bytes to hash (0 means continue to the end of the file) - :type length: int or long :param int block_size: number of bytes to hash per result (must not be less than 256; 0 means to compute only one hash of the entire segment) - :type block_size: int :return: `str` of bytes representing the hash of each block, concatenated together - :raises IOError: if the server doesn't support the "check-file" - extension, or possibly doesn't support the hash algorithm - requested + :raises: + ``IOError`` -- if the server doesn't support the "check-file" + extension, or possibly doesn't support the hash algorithm requested .. note:: Many (most?) servers don't support this extension yet. .. versionadded:: 1.4 """ - t, msg = self.sftp._request(CMD_EXTENDED, 'check-file', self.handle, - hash_algorithm, long(offset), long(length), block_size) - ext = msg.get_text() - alg = msg.get_text() + t, msg = self.sftp._request( + CMD_EXTENDED, + "check-file", + self.handle, + hash_algorithm, + long(offset), + long(length), + block_size, + ) + msg.get_text() # ext + msg.get_text() # alg data = msg.get_remainder() return data @@ -417,7 +463,7 @@ def prefetch(self, file_size=None): compatibility. """ if file_size is None: - file_size = self.stat().st_size; + file_size = self.stat().st_size # queue up async reads for the rest of the file chunks = [] @@ -437,19 +483,22 @@ def readv(self, chunks): once. 
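
# Illustrative usage sketch (not part of the patch above): the "check-file"
# extension wrapped by SFTPFile.check() in this hunk.  As the docstring notes,
# many servers do not implement it, so the call is guarded with IOError
# handling.  Host, credentials and the remote path are placeholders.
from binascii import hexlify

import paramiko

client = paramiko.SSHClient()
client.load_system_host_keys()
client.connect("sftp.example.com", username="demo")  # placeholder host/creds
sftp = client.open_sftp()
with sftp.open("backup.tar.gz", "rb") as f:
    try:
        # offset/length/block_size all default to 0: hash the whole file as
        # a single block.
        digest = f.check("sha1")
        print("remote sha1: {}".format(hexlify(digest).decode()))
    except IOError as e:
        print("server lacks check-file support: {}".format(e))
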
:param chunks: - a list of (offset, length) tuples indicating which sections of the - file to read - :type chunks: list(tuple(long, int)) + a list of ``(offset, length)`` tuples indicating which sections of + the file to read :return: a list of blocks read, in the same order as in ``chunks`` .. versionadded:: 1.5.4 """ - self.sftp._log(DEBUG, 'readv(%s, %r)' % (hexlify(self.handle), chunks)) + self.sftp._log( + DEBUG, "readv({}, {!r})".format(hexlify(self.handle), chunks) + ) read_chunks = [] for offset, size in chunks: # don't fetch data that's already in the prefetch buffer - if self._data_in_prefetch_buffers(offset) or self._data_in_prefetch_requests(offset, size): + if self._data_in_prefetch_buffers( + offset + ) or self._data_in_prefetch_requests(offset, size): continue # break up anything larger than the max read size @@ -465,7 +514,7 @@ def readv(self, chunks): self.seek(x[0]) yield self.read(x[1]) - ### internals... + # ...internals... def _get_size(self): try: @@ -485,7 +534,9 @@ def _prefetch_thread(self, chunks): # do these read requests in a temporary thread because there may be # a lot of them, so it may block. for offset, length in chunks: - num = self.sftp._async_request(self, CMD_READ, self.handle, long(offset), int(length)) + num = self.sftp._async_request( + self, CMD_READ, self.handle, long(offset), int(length) + ) with self._prefetch_lock: self._prefetch_extents[num] = (offset, length) @@ -498,7 +549,7 @@ def _async_response(self, t, msg, num): self._saved_exception = e return if t != CMD_DATA: - raise SFTPError('Expected data') + raise SFTPError("Expected data") data = msg.get_string() while True: with self._prefetch_lock: diff --git a/paramiko/sftp_handle.py b/paramiko/sftp_handle.py index edceb5adc..a7e22f01d 100644 --- a/paramiko/sftp_handle.py +++ b/paramiko/sftp_handle.py @@ -25,24 +25,26 @@ from paramiko.util import ClosingContextManager -class SFTPHandle (ClosingContextManager): +class SFTPHandle(ClosingContextManager): """ Abstract object representing a handle to an open file (or folder) in an SFTP server implementation. Each handle has a string representation used by the client to refer to the underlying file. - + Server implementations can (and should) subclass SFTPHandle to implement features of a file handle, like `stat` or `chattr`. - + Instances of this class may be used as context managers. """ + def __init__(self, flags=0): """ Create a new file handle representing a local file being served over SFTP. If ``flags`` is passed in, it's used to determine if the file is open in append mode. - - :param int flags: optional flags as passed to `.SFTPServerInterface.open` + + :param int flags: optional flags as passed to + `.SFTPServerInterface.open` """ self.__flags = flags self.__name = None @@ -55,17 +57,17 @@ def close(self): When a client closes a file, this method is called on the handle. Normally you would use this method to close the underlying OS level file object(s). - + The default implementation checks for attributes on ``self`` named ``readfile`` and/or ``writefile``, and if either or both are present, their ``close()`` methods are called. This means that if you are using the default implementations of `read` and `write`, this method's default implementation should be fine also. 
""" - readfile = getattr(self, 'readfile', None) + readfile = getattr(self, "readfile", None) if readfile is not None: readfile.close() - writefile = getattr(self, 'writefile', None) + writefile = getattr(self, "writefile", None) if writefile is not None: writefile.close() @@ -76,7 +78,7 @@ def read(self, offset, length): to be 64 bits. If the end of the file has been reached, this method may return an - empty string to signify EOF, or it may also return `.SFTP_EOF`. + empty string to signify EOF, or it may also return ``SFTP_EOF``. The default implementation checks for an attribute on ``self`` named ``readfile``, and if present, performs the read operation on the Python @@ -84,11 +86,10 @@ def read(self, offset, length): common case where you are wrapping a Python file object.) :param offset: position in the file to start reading from. - :type offset: int or long :param int length: number of bytes to attempt to read. :return: data read from the file, or an SFTP error code, as a `str`. """ - readfile = getattr(self, 'readfile', None) + readfile = getattr(self, "readfile", None) if readfile is None: return SFTP_OP_UNSUPPORTED try: @@ -117,13 +118,12 @@ def write(self, offset, data): differently from ``readfile`` to make it easy to implement read-only (or write-only) files, but if both attributes are present, they should refer to the same file. - + :param offset: position in the file to start reading from. - :type offset: int or long :param str data: data to write into the file. - :return: an SFTP error code like `.SFTP_OK`. + :return: an SFTP error code like ``SFTP_OK``. """ - writefile = getattr(self, 'writefile', None) + writefile = getattr(self, "writefile", None) if writefile is None: return SFTP_OP_UNSUPPORTED try: @@ -151,7 +151,7 @@ def stat(self): :return: an attributes object for the given file, or an SFTP error code - (like `.SFTP_PERMISSION_DENIED`). + (like ``SFTP_PERMISSION_DENIED``). :rtype: `.SFTPAttributes` or error code """ return SFTP_OP_UNSUPPORTED @@ -163,11 +163,11 @@ def chattr(self, attr): check for the presence of fields before using them. :param .SFTPAttributes attr: the attributes to change on this file. - :return: an `int` error code like `.SFTP_OK`. + :return: an `int` error code like ``SFTP_OK``. """ return SFTP_OP_UNSUPPORTED - ### internals... + # ...internals... def _set_files(self, files): """ @@ -179,7 +179,7 @@ def _set_files(self, files): def _get_next_files(self): """ - Used by the SFTP server code to retreive a cached directory + Used by the SFTP server code to retrieve a cached directory listing. 
""" fnlist = self.__files[:16] diff --git a/paramiko/sftp_server.py b/paramiko/sftp_server.py index ce287e8f0..8265df96c 100644 --- a/paramiko/sftp_server.py +++ b/paramiko/sftp_server.py @@ -26,8 +26,13 @@ from hashlib import md5, sha1 from paramiko import util -from paramiko.sftp import BaseSFTP, Message, SFTP_FAILURE, \ - SFTP_PERMISSION_DENIED, SFTP_NO_SUCH_FILE +from paramiko.sftp import ( + BaseSFTP, + Message, + SFTP_FAILURE, + SFTP_PERMISSION_DENIED, + SFTP_NO_SUCH_FILE, +) from paramiko.sftp_si import SFTPServerInterface from paramiko.sftp_attr import SFTPAttributes from paramiko.common import DEBUG @@ -36,28 +41,65 @@ # known hash algorithms for the "check-file" extension -from paramiko.sftp import CMD_HANDLE, SFTP_DESC, CMD_STATUS, SFTP_EOF, CMD_NAME, \ - SFTP_BAD_MESSAGE, CMD_EXTENDED_REPLY, SFTP_FLAG_READ, SFTP_FLAG_WRITE, \ - SFTP_FLAG_APPEND, SFTP_FLAG_CREATE, SFTP_FLAG_TRUNC, SFTP_FLAG_EXCL, \ - CMD_NAMES, CMD_OPEN, CMD_CLOSE, SFTP_OK, CMD_READ, CMD_DATA, CMD_WRITE, \ - CMD_REMOVE, CMD_RENAME, CMD_MKDIR, CMD_RMDIR, CMD_OPENDIR, CMD_READDIR, \ - CMD_STAT, CMD_ATTRS, CMD_LSTAT, CMD_FSTAT, CMD_SETSTAT, CMD_FSETSTAT, \ - CMD_READLINK, CMD_SYMLINK, CMD_REALPATH, CMD_EXTENDED, SFTP_OP_UNSUPPORTED +from paramiko.sftp import ( + CMD_HANDLE, + SFTP_DESC, + CMD_STATUS, + SFTP_EOF, + CMD_NAME, + SFTP_BAD_MESSAGE, + CMD_EXTENDED_REPLY, + SFTP_FLAG_READ, + SFTP_FLAG_WRITE, + SFTP_FLAG_APPEND, + SFTP_FLAG_CREATE, + SFTP_FLAG_TRUNC, + SFTP_FLAG_EXCL, + CMD_NAMES, + CMD_OPEN, + CMD_CLOSE, + SFTP_OK, + CMD_READ, + CMD_DATA, + CMD_WRITE, + CMD_REMOVE, + CMD_RENAME, + CMD_MKDIR, + CMD_RMDIR, + CMD_OPENDIR, + CMD_READDIR, + CMD_STAT, + CMD_ATTRS, + CMD_LSTAT, + CMD_FSTAT, + CMD_SETSTAT, + CMD_FSETSTAT, + CMD_READLINK, + CMD_SYMLINK, + CMD_REALPATH, + CMD_EXTENDED, + SFTP_OP_UNSUPPORTED, +) -_hash_class = { - 'sha1': sha1, - 'md5': md5, -} +_hash_class = {"sha1": sha1, "md5": md5} -class SFTPServer (BaseSFTP, SubsystemHandler): +class SFTPServer(BaseSFTP, SubsystemHandler): """ Server-side SFTP subsystem support. Since this is a `.SubsystemHandler`, it can be (and is meant to be) set as the handler for ``"sftp"`` requests. Use `.Transport.set_subsystem_handler` to activate this class. """ - def __init__(self, channel, name, server, sftp_si=SFTPServerInterface, *largs, **kwargs): + def __init__( + self, + channel, + name, + server, + sftp_si=SFTPServerInterface, + *largs, + **kwargs + ): """ The constructor for SFTPServer is meant to be called from within the `.Transport` as a subsystem handler. ``server`` and any additional @@ -68,14 +110,14 @@ def __init__(self, channel, name, server, sftp_si=SFTPServerInterface, *largs, * :param str name: name of the requested subsystem. :param .ServerInterface server: the server object associated with this channel and subsystem - :param class sftp_si: + :param sftp_si: a subclass of `.SFTPServerInterface` to use for handling individual requests. 
""" BaseSFTP.__init__(self) SubsystemHandler.__init__(self, channel, name, server) transport = channel.get_transport() - self.logger = util.get_logger(transport.get_log_channel() + '.sftp') + self.logger = util.get_logger(transport.get_log_channel() + ".sftp") self.ultra_debug = transport.get_hexdump() self.next_handle = 1 # map of handle-string to SFTPHandle for files & folders: @@ -86,23 +128,27 @@ def __init__(self, channel, name, server, sftp_si=SFTPServerInterface, *largs, * def _log(self, level, msg): if issubclass(type(msg), list): for m in msg: - super(SFTPServer, self)._log(level, "[chan " + self.sock.get_name() + "] " + m) + super(SFTPServer, self)._log( + level, "[chan " + self.sock.get_name() + "] " + m + ) else: - super(SFTPServer, self)._log(level, "[chan " + self.sock.get_name() + "] " + msg) + super(SFTPServer, self)._log( + level, "[chan " + self.sock.get_name() + "] " + msg + ) def start_subsystem(self, name, transport, channel): self.sock = channel - self._log(DEBUG, 'Started sftp server on channel %s' % repr(channel)) + self._log(DEBUG, "Started sftp server on channel {!r}".format(channel)) self._send_server_version() self.server.session_started() while True: try: t, data = self._read_packet() except EOFError: - self._log(DEBUG, 'EOF -- end of session') + self._log(DEBUG, "EOF -- end of session") return except Exception as e: - self._log(DEBUG, 'Exception on channel: ' + str(e)) + self._log(DEBUG, "Exception on channel: " + str(e)) self._log(DEBUG, util.tb_strings()) return msg = Message(data) @@ -110,7 +156,7 @@ def start_subsystem(self, name, transport, channel): try: self._process(t, request_number, msg) except Exception as e: - self._log(DEBUG, 'Exception in server processing: ' + str(e)) + self._log(DEBUG, "Exception in server processing: " + str(e)) self._log(DEBUG, util.tb_strings()) # send some kind of failure message, at least try: @@ -121,7 +167,8 @@ def start_subsystem(self, name, transport, channel): def finish_subsystem(self): self.server.session_ended() super(SFTPServer, self).finish_subsystem() - # close any file handles that were left open (so we can return them to the OS quickly) + # close any file handles that were left open + # (so we can return them to the OS quickly) for f in self.file_table.values(): f.close() for f in self.folder_table.values(): @@ -163,7 +210,7 @@ def set_file_attr(filename, attr): name of the file to alter (should usually be an absolute path). :param .SFTPAttributes attr: attributes to change. """ - if sys.platform != 'win32': + if sys.platform != "win32": # mode operations are meaningless on win32 if attr._flags & attr.FLAG_PERMISSIONS: os.chmod(filename, attr.st_mode) @@ -172,10 +219,10 @@ def set_file_attr(filename, attr): if attr._flags & attr.FLAG_AMTIME: os.utime(filename, (attr.st_atime, attr.st_mtime)) if attr._flags & attr.FLAG_SIZE: - with open(filename, 'w+') as f: + with open(filename, "w+") as f: f.truncate(attr.st_size) - ### internals... + # ...internals... 
def _response(self, request_number, t, *arg): msg = Message() @@ -190,7 +237,9 @@ def _response(self, request_number, t, *arg): elif type(item) is SFTPAttributes: item._pack(msg) else: - raise Exception('unknown type for ' + repr(item) + ' type ' + repr(type(item))) + raise Exception( + "unknown type for {!r} type {!r}".format(item, type(item)) + ) self._send_packet(t, msg) def _send_handle_response(self, request_number, handle, folder=False): @@ -198,7 +247,7 @@ def _send_handle_response(self, request_number, handle, folder=False): # must be error code self._send_status(request_number, handle) return - handle._set_name(b('hx%d' % self.next_handle)) + handle._set_name(b("hx{:d}".format(self.next_handle))) self.next_handle += 1 if folder: self.folder_table[handle._get_name()] = handle @@ -211,9 +260,10 @@ def _send_status(self, request_number, code, desc=None): try: desc = SFTP_DESC[code] except IndexError: - desc = 'Unknown' - # some clients expect a "langauge" tag at the end (but don't mind it being blank) - self._response(request_number, CMD_STATUS, code, desc, '') + desc = "Unknown" + # some clients expect a "langauge" tag at the end + # (but don't mind it being blank) + self._response(request_number, CMD_STATUS, code, desc, "") def _open_folder(self, request_number, path): resp = self.server.list_folder(path) @@ -251,7 +301,9 @@ def _check_file(self, request_number, msg): length = msg.get_int64() block_size = msg.get_int() if handle not in self.file_table: - self._send_status(request_number, SFTP_BAD_MESSAGE, 'Invalid handle') + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) return f = self.file_table[handle] for x in alg_list: @@ -260,18 +312,22 @@ def _check_file(self, request_number, msg): alg = _hash_class[x] break else: - self._send_status(request_number, SFTP_FAILURE, 'No supported hash types found') + self._send_status( + request_number, SFTP_FAILURE, "No supported hash types found" + ) return if length == 0: st = f.stat() if not issubclass(type(st), SFTPAttributes): - self._send_status(request_number, st, 'Unable to stat file') + self._send_status(request_number, st, "Unable to stat file") return length = st.st_size - start if block_size == 0: block_size = length if block_size < 256: - self._send_status(request_number, SFTP_FAILURE, 'Block size too small') + self._send_status( + request_number, SFTP_FAILURE, "Block size too small" + ) return sum_out = bytes() @@ -285,7 +341,9 @@ def _check_file(self, request_number, msg): while count < blocklen: data = f.read(offset, chunklen) if not isinstance(data, bytes_types): - self._send_status(request_number, data, 'Unable to hash file') + self._send_status( + request_number, data, "Unable to hash file" + ) return hash_obj.update(data) count += len(data) @@ -294,7 +352,7 @@ def _check_file(self, request_number, msg): msg = Message() msg.add_int(request_number) - msg.add_string('check-file') + msg.add_string("check-file") msg.add_string(algname) msg.add_bytes(sum_out) self._send_packet(CMD_EXTENDED_REPLY, msg) @@ -318,12 +376,14 @@ def _convert_pflags(self, pflags): return flags def _process(self, t, request_number, msg): - self._log(DEBUG, 'Request: %s' % CMD_NAMES[t]) + self._log(DEBUG, "Request: {}".format(CMD_NAMES[t])) if t == CMD_OPEN: path = msg.get_text() flags = self._convert_pflags(msg.get_int()) attr = SFTPAttributes._from_msg(msg) - self._send_handle_response(request_number, self.server.open(path, flags, attr)) + self._send_handle_response( + request_number, self.server.open(path, flags, attr) + ) 
elif t == CMD_CLOSE: handle = msg.get_binary() if handle in self.folder_table: @@ -335,13 +395,17 @@ def _process(self, t, request_number, msg): del self.file_table[handle] self._send_status(request_number, SFTP_OK) return - self._send_status(request_number, SFTP_BAD_MESSAGE, 'Invalid handle') + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) elif t == CMD_READ: handle = msg.get_binary() offset = msg.get_int64() length = msg.get_int() if handle not in self.file_table: - self._send_status(request_number, SFTP_BAD_MESSAGE, 'Invalid handle') + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) return data = self.file_table[handle].read(offset, length) if isinstance(data, (bytes_types, string_types)): @@ -356,16 +420,22 @@ def _process(self, t, request_number, msg): offset = msg.get_int64() data = msg.get_binary() if handle not in self.file_table: - self._send_status(request_number, SFTP_BAD_MESSAGE, 'Invalid handle') + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) return - self._send_status(request_number, self.file_table[handle].write(offset, data)) + self._send_status( + request_number, self.file_table[handle].write(offset, data) + ) elif t == CMD_REMOVE: path = msg.get_text() self._send_status(request_number, self.server.remove(path)) elif t == CMD_RENAME: oldpath = msg.get_text() newpath = msg.get_text() - self._send_status(request_number, self.server.rename(oldpath, newpath)) + self._send_status( + request_number, self.server.rename(oldpath, newpath) + ) elif t == CMD_MKDIR: path = msg.get_text() attr = SFTPAttributes._from_msg(msg) @@ -380,7 +450,9 @@ def _process(self, t, request_number, msg): elif t == CMD_READDIR: handle = msg.get_binary() if handle not in self.folder_table: - self._send_status(request_number, SFTP_BAD_MESSAGE, 'Invalid handle') + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) return folder = self.folder_table[handle] self._read_folder(request_number, folder) @@ -401,7 +473,9 @@ def _process(self, t, request_number, msg): elif t == CMD_FSTAT: handle = msg.get_binary() if handle not in self.file_table: - self._send_status(request_number, SFTP_BAD_MESSAGE, 'Invalid handle') + self._send_status( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) return resp = self.file_table[handle].stat() if issubclass(type(resp), SFTPAttributes): @@ -416,29 +490,46 @@ def _process(self, t, request_number, msg): handle = msg.get_binary() attr = SFTPAttributes._from_msg(msg) if handle not in self.file_table: - self._response(request_number, SFTP_BAD_MESSAGE, 'Invalid handle') + self._response( + request_number, SFTP_BAD_MESSAGE, "Invalid handle" + ) return - self._send_status(request_number, self.file_table[handle].chattr(attr)) + self._send_status( + request_number, self.file_table[handle].chattr(attr) + ) elif t == CMD_READLINK: path = msg.get_text() resp = self.server.readlink(path) if isinstance(resp, (bytes_types, string_types)): - self._response(request_number, CMD_NAME, 1, resp, '', SFTPAttributes()) + self._response( + request_number, CMD_NAME, 1, resp, "", SFTPAttributes() + ) else: self._send_status(request_number, resp) elif t == CMD_SYMLINK: - # the sftp 2 draft is incorrect here! path always follows target_path + # the sftp 2 draft is incorrect here! 
+ # path always follows target_path target_path = msg.get_text() path = msg.get_text() - self._send_status(request_number, self.server.symlink(target_path, path)) + self._send_status( + request_number, self.server.symlink(target_path, path) + ) elif t == CMD_REALPATH: path = msg.get_text() rpath = self.server.canonicalize(path) - self._response(request_number, CMD_NAME, 1, rpath, '', SFTPAttributes()) + self._response( + request_number, CMD_NAME, 1, rpath, "", SFTPAttributes() + ) elif t == CMD_EXTENDED: tag = msg.get_text() - if tag == 'check-file': + if tag == "check-file": self._check_file(request_number, msg) + elif tag == "posix-rename@openssh.com": + oldpath = msg.get_text() + newpath = msg.get_text() + self._send_status( + request_number, self.server.posix_rename(oldpath, newpath) + ) else: self._send_status(request_number, SFTP_OP_UNSUPPORTED) else: diff --git a/paramiko/sftp_si.py b/paramiko/sftp_si.py index 61db956c8..40dc561c0 100644 --- a/paramiko/sftp_si.py +++ b/paramiko/sftp_si.py @@ -25,7 +25,7 @@ from paramiko.sftp import SFTP_OP_UNSUPPORTED -class SFTPServerInterface (object): +class SFTPServerInterface(object): """ This class defines an interface for controlling the behavior of paramiko when using the `.SFTPServer` subsystem to provide an SFTP server. @@ -35,16 +35,16 @@ class SFTPServerInterface (object): SFTP sessions). However, raising an exception will usually cause the SFTP session to abruptly end, so you will usually want to catch exceptions and return an appropriate error code. - + All paths are in string form instead of unicode because not all SFTP clients & servers obey the requirement that paths be encoded in UTF-8. """ - + def __init__(self, server, *largs, **kwargs): """ Create a new SFTPServerInterface object. This method does nothing by default and is meant to be overridden by subclasses. - + :param .ServerInterface server: the server object associated with this channel and SFTP subsystem """ @@ -73,7 +73,7 @@ def open(self, path, flags, attr): on that file. On success, a new object subclassed from `.SFTPHandle` should be returned. This handle will be used for future operations on the file (read, write, etc). On failure, an error code such as - `.SFTP_PERMISSION_DENIED` should be returned. + ``SFTP_PERMISSION_DENIED`` should be returned. ``flags`` contains the requested mode for opening (read-only, write-append, etc) as a bitset of flags from the ``os`` module: @@ -92,7 +92,7 @@ def open(self, path, flags, attr): The ``attr`` object contains requested attributes of the file if it has to be created. Some or all attribute fields may be missing if the client didn't specify them. - + .. note:: The SFTP protocol defines all files to be in "binary" mode. There is no equivalent to Python's "text" mode. @@ -121,13 +121,14 @@ def list_folder(self, path): `.SFTPAttributes.from_stat` will usually do what you want. In case of an error, you should return one of the ``SFTP_*`` error - codes, such as `.SFTP_PERMISSION_DENIED`. + codes, such as ``SFTP_PERMISSION_DENIED``. - :param str path: the requested path (relative or absolute) to be listed. + :param str path: the requested path (relative or absolute) to be + listed. :return: a list of the files in the given folder, using `.SFTPAttributes` objects. - + .. note:: You should normalize the given ``path`` first (see the `os.path` module) and check appropriate permissions before returning the list @@ -150,7 +151,7 @@ def stat(self, path): for. 
:return: an `.SFTPAttributes` object for the given file, or an SFTP error - code (like `.SFTP_PERMISSION_DENIED`). + code (like ``SFTP_PERMISSION_DENIED``). """ return SFTP_OP_UNSUPPORTED @@ -168,7 +169,7 @@ def lstat(self, path): :type path: str :return: an `.SFTPAttributes` object for the given file, or an SFTP error - code (like `.SFTP_PERMISSION_DENIED`). + code (like ``SFTP_PERMISSION_DENIED``). """ return SFTP_OP_UNSUPPORTED @@ -178,7 +179,7 @@ def remove(self, path): :param str path: the requested path (relative or absolute) of the file to delete. - :return: an SFTP error code `int` like `.SFTP_OK`. + :return: an SFTP error code `int` like ``SFTP_OK``. """ return SFTP_OP_UNSUPPORTED @@ -189,15 +190,34 @@ def rename(self, oldpath, newpath): and since there's no other (easy) way to move files via SFTP, it's probably a good idea to implement "move" in this method too, even for files that cross disk partition boundaries, if at all possible. - + .. note:: You should return an error if a file with the same name as ``newpath`` already exists. (The rename operation should be non-desctructive.) + .. note:: + This method implements 'standard' SFTP ``RENAME`` behavior; those + seeking the OpenSSH "POSIX rename" extension behavior should use + `posix_rename`. + + :param str oldpath: + the requested path (relative or absolute) of the existing file. + :param str newpath: the requested new path of the file. + :return: an SFTP error code `int` like ``SFTP_OK``. + """ + return SFTP_OP_UNSUPPORTED + + def posix_rename(self, oldpath, newpath): + """ + Rename (or move) a file, following posix conventions. If newpath + already exists, it will be overwritten. + :param str oldpath: the requested path (relative or absolute) of the existing file. :param str newpath: the requested new path of the file. - :return: an SFTP error code `int` like `.SFTP_OK`. + :return: an SFTP error code `int` like ``SFTP_OK``. + + :versionadded: 2.2 """ return SFTP_OP_UNSUPPORTED @@ -208,13 +228,13 @@ def mkdir(self, path, attr): The ``attr`` object will contain only those fields provided by the client in its request, so you should use ``hasattr`` to check for - the presense of fields before using them. In some cases, the ``attr`` + the presence of fields before using them. In some cases, the ``attr`` object may be completely empty. :param str path: requested path (relative or absolute) of the new folder. :param .SFTPAttributes attr: requested attributes of the new folder. - :return: an SFTP error code `int` like `.SFTP_OK`. + :return: an SFTP error code `int` like ``SFTP_OK``. """ return SFTP_OP_UNSUPPORTED @@ -226,7 +246,7 @@ def rmdir(self, path): :param str path: requested path (relative or absolute) of the folder to remove. - :return: an SFTP error code `int` like `.SFTP_OK`. + :return: an SFTP error code `int` like ``SFTP_OK``. """ return SFTP_OP_UNSUPPORTED @@ -241,7 +261,7 @@ def chattr(self, path, attr): :param attr: requested attributes to change on the file (an `.SFTPAttributes` object) - :return: an error code `int` like `.SFTP_OK`. + :return: an error code `int` like ``SFTP_OK``. 
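
# Illustrative sketch (not part of the patch above): backing the new
# posix_rename() hook with os.rename(), which on POSIX systems already
# overwrites an existing target, while keeping rename() non-destructive.
# The root directory and path mapping are hypothetical.
import errno
import os

from paramiko import SFTPServer, SFTPServerInterface
from paramiko.sftp import SFTP_OK


class OverwritingSFTPServer(SFTPServerInterface):
    ROOT = "/srv/sftp"  # hypothetical base directory

    def _local(self, path):
        # Map the client-supplied path onto the local filesystem under ROOT.
        return os.path.join(self.ROOT, self.canonicalize(path).lstrip("/"))

    def rename(self, oldpath, newpath):
        # Standard SFTP RENAME semantics: never clobber an existing target.
        if os.path.exists(self._local(newpath)):
            return SFTPServer.convert_errno(errno.EEXIST)
        return self.posix_rename(oldpath, newpath)

    def posix_rename(self, oldpath, newpath):
        # POSIX semantics: silently replace newpath if it already exists.
        try:
            os.rename(self._local(oldpath), self._local(newpath))
        except OSError as e:
            return SFTPServer.convert_errno(e.errno)
        return SFTP_OK
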
""" return SFTP_OP_UNSUPPORTED @@ -262,30 +282,30 @@ def canonicalize(self, path): if os.path.isabs(path): out = os.path.normpath(path) else: - out = os.path.normpath('/' + path) - if sys.platform == 'win32': + out = os.path.normpath("/" + path) + if sys.platform == "win32": # on windows, normalize backslashes to sftp/posix format - out = out.replace('\\', '/') + out = out.replace("\\", "/") return out - + def readlink(self, path): """ Return the target of a symbolic link (or shortcut) on the server. If the specified path doesn't refer to a symbolic link, an error should be returned. - + :param str path: path (relative or absolute) of the symbolic link. :return: the target `str` path of the symbolic link, or an error code like - `.SFTP_NO_SUCH_FILE`. + ``SFTP_NO_SUCH_FILE``. """ return SFTP_OP_UNSUPPORTED - + def symlink(self, target_path, path): """ Create a symbolic link on the server, as new pathname ``path``, with ``target_path`` as the target of the link. - + :param str target_path: path (relative or absolute) of the target for this new symbolic link. diff --git a/paramiko/ssh_exception.py b/paramiko/ssh_exception.py index ed36a9525..39fcb10dd 100644 --- a/paramiko/ssh_exception.py +++ b/paramiko/ssh_exception.py @@ -19,122 +19,154 @@ import socket -class SSHException (Exception): +class SSHException(Exception): """ Exception raised by failures in SSH2 protocol negotiation or logic errors. """ + pass -class AuthenticationException (SSHException): +class AuthenticationException(SSHException): """ Exception raised when authentication failed for some reason. It may be possible to retry with different credentials. (Other classes specify more specific reasons.) - + .. versionadded:: 1.6 """ + pass - -class PasswordRequiredException (AuthenticationException): + +class PasswordRequiredException(AuthenticationException): """ Exception raised when a password is needed to unlock a private key file. """ + pass -class BadAuthenticationType (AuthenticationException): +class BadAuthenticationType(AuthenticationException): """ Exception raised when an authentication type (like password) is used, but the server isn't allowing that type. (It may only allow public-key, for example.) - - :ivar list allowed_types: - list of allowed authentication types provided by the server (possible - values are: ``"none"``, ``"password"``, and ``"publickey"``). - + .. versionadded:: 1.1 """ + allowed_types = [] - + + # TODO 3.0: remove explanation kwarg def __init__(self, explanation, types): - AuthenticationException.__init__(self, explanation) + # TODO 3.0: remove this supercall unless it's actually required for + # pickling (after fixing pickling) + AuthenticationException.__init__(self, explanation, types) + self.explanation = explanation self.allowed_types = types - # for unpickling - self.args = (explanation, types, ) def __str__(self): - return SSHException.__str__(self) + ' (allowed_types=%r)' % self.allowed_types + return "{}; allowed types: {!r}".format( + self.explanation, self.allowed_types + ) -class PartialAuthentication (AuthenticationException): +class PartialAuthentication(AuthenticationException): """ An internal exception thrown in the case of partial authentication. 
""" + allowed_types = [] - + def __init__(self, types): - AuthenticationException.__init__(self, 'partial authentication') + AuthenticationException.__init__(self, types) self.allowed_types = types - # for unpickling - self.args = (types, ) + + def __str__(self): + return "Partial authentication; allowed types: {!r}".format( + self.allowed_types + ) -class ChannelException (SSHException): +class ChannelException(SSHException): """ Exception raised when an attempt to open a new `.Channel` fails. - - :ivar int code: the error code returned by the server - + + :param int code: the error code returned by the server + .. versionadded:: 1.6 """ + def __init__(self, code, text): - SSHException.__init__(self, text) + SSHException.__init__(self, code, text) self.code = code - # for unpickling - self.args = (code, text, ) + self.text = text + def __str__(self): + return "ChannelException({!r}, {!r})".format(self.code, self.text) -class BadHostKeyException (SSHException): + +class BadHostKeyException(SSHException): """ The host key given by the SSH server did not match what we were expecting. - - :ivar str hostname: the hostname of the SSH server - :ivar PKey got_key: the host key presented by the server - :ivar PKey expected_key: the host key expected - + + :param str hostname: the hostname of the SSH server + :param PKey got_key: the host key presented by the server + :param PKey expected_key: the host key expected + .. versionadded:: 1.6 """ + def __init__(self, hostname, got_key, expected_key): - SSHException.__init__(self, - 'Host key for server %s does not match : got %s expected %s' % ( - hostname, - got_key.get_base64(), - expected_key.get_base64())) + SSHException.__init__(self, hostname, got_key, expected_key) self.hostname = hostname self.key = got_key self.expected_key = expected_key - # for unpickling - self.args = (hostname, got_key, expected_key, ) + def __str__(self): + msg = ( + "Host key for server '{}' does not match: got '{}', expected '{}'" + ) # noqa + return msg.format( + self.hostname, + self.key.get_base64(), + self.expected_key.get_base64(), + ) + + +class IncompatiblePeer(SSHException): + """ + A disagreement arose regarding an algorithm required for key exchange. + + .. versionadded:: 2.9 + """ + + # TODO 3.0: consider making this annotate w/ 1..N 'missing' algorithms, + # either just the first one that would halt kex, or even updating the + # Transport logic so we record /all/ that /could/ halt kex. + # TODO: update docstrings where this may end up raised so they are more + # specific. + pass -class ProxyCommandFailure (SSHException): + +class ProxyCommandFailure(SSHException): """ The "ProxyCommand" found in the .ssh/config file returned an error. - :ivar str command: The command line that is generating this exception. - :ivar str error: The error captured from the proxy command output. + :param str command: The command line that is generating this exception. + :param str error: The error captured from the proxy command output. 
""" + def __init__(self, command, error): - SSHException.__init__(self, - '"ProxyCommand (%s)" returned non-zero exit status: %s' % ( - command, error - ) - ) + SSHException.__init__(self, command, error) + self.command = command self.error = error - # for unpickling - self.args = (command, error, ) + + def __str__(self): + return 'ProxyCommand("{}") returned nonzero exit status: {}'.format( + self.command, self.error + ) class NoValidConnectionsError(socket.error): @@ -147,7 +179,7 @@ class NoValidConnectionsError(socket.error): `socket.error` subclass, message, etc) we expose a single unified error message and a ``None`` errno so that instances of this class match most normal handling of `socket.error` objects. - + To see the wrapped exception objects, access the ``errors`` attribute. ``errors`` is a dict whose keys are address tuples (e.g. ``('127.0.0.1', 22)``) and whose values are the exception encountered trying to connect to @@ -159,23 +191,47 @@ class NoValidConnectionsError(socket.error): .. versionadded:: 1.16 """ + def __init__(self, errors): """ :param dict errors: The errors dict to store, as described by class docstring. """ addrs = sorted(errors.keys()) - body = ', '.join([x[0] for x in addrs[:-1]]) + body = ", ".join([x[0] for x in addrs[:-1]]) tail = addrs[-1][0] if body: msg = "Unable to connect to port {0} on {1} or {2}" else: msg = "Unable to connect to port {0} on {2}" super(NoValidConnectionsError, self).__init__( - None, # stand-in for errno - msg.format(addrs[0][1], body, tail) + None, msg.format(addrs[0][1], body, tail) # stand-in for errno ) self.errors = errors def __reduce__(self): - return (self.__class__, (self.errors, )) + return (self.__class__, (self.errors,)) + + +class CouldNotCanonicalize(SSHException): + """ + Raised when hostname canonicalization fails & fallback is disabled. + + .. versionadded:: 2.7 + """ + + pass + + +class ConfigParseError(SSHException): + """ + A fatal error was encountered trying to parse SSH config data. + + Typically this means a config file violated the ``ssh_config`` + specification in a manner that requires exiting immediately, such as not + matching ``key = value`` syntax or misusing certain ``Match`` keywords. + + .. versionadded:: 2.7 + """ + + pass diff --git a/paramiko/ssh_gss.py b/paramiko/ssh_gss.py index e906a851b..5d4cb4166 100644 --- a/paramiko/ssh_gss.py +++ b/paramiko/ssh_gss.py @@ -33,34 +33,47 @@ import os import sys -""" -:var bool GSS_AUTH_AVAILABLE: - Constraint that indicates if GSS-API / SSPI is available. -""" + +#: A boolean constraint that indicates if GSS-API / SSPI is available. GSS_AUTH_AVAILABLE = True -from pyasn1.type.univ import ObjectIdentifier -from pyasn1.codec.der import encoder, decoder -from paramiko.common import MSG_USERAUTH_REQUEST -from paramiko.ssh_exception import SSHException +#: A tuple of the exception types used by the underlying GSSAPI implementation. 
+GSS_EXCEPTIONS = () -""" -:var str _API: Constraint for the used API -""" -_API = "MIT" + +#: :var str _API: Constraint for the used API +_API = None try: import gssapi + + if hasattr(gssapi, "__title__") and gssapi.__title__ == "python-gssapi": + # old, unmaintained python-gssapi package + _API = "MIT" # keep this for compatibility + GSS_EXCEPTIONS = (gssapi.GSSException,) + else: + _API = "PYTHON-GSSAPI-NEW" + GSS_EXCEPTIONS = ( + gssapi.exceptions.GeneralError, + gssapi.raw.misc.GSSError, + ) except (ImportError, OSError): try: + import pywintypes import sspicon import sspi + _API = "SSPI" + GSS_EXCEPTIONS = (pywintypes.error,) except ImportError: GSS_AUTH_AVAILABLE = False _API = None +from paramiko.common import MSG_USERAUTH_REQUEST +from paramiko.ssh_exception import SSHException +from paramiko._version import __version_info__ + def GSSAuth(auth_method, gss_deleg_creds=True): """ @@ -70,22 +83,24 @@ def GSSAuth(auth_method, gss_deleg_creds=True): (gssapi-with-mic or gss-keyex) :param bool gss_deleg_creds: Delegate client credentials or not. We delegate credentials by default. - :return: Either an `._SSH_GSSAPI` (Unix) object or an - `_SSH_SSPI` (Windows) object - :rtype: Object + :return: Either an `._SSH_GSSAPI_OLD` or `._SSH_GSSAPI_NEW` (Unix) + object or an `_SSH_SSPI` (Windows) object + :rtype: object - :raise ImportError: If no GSS-API / SSPI module could be imported. + :raises: ``ImportError`` -- If no GSS-API / SSPI module could be imported. :see: `RFC 4462 `_ - :note: Check for the available API and return either an `._SSH_GSSAPI` - (MIT GSSAPI) object or an `._SSH_SSPI` (MS SSPI) object. If you - get python-gssapi working on Windows, python-gssapi - will be used and a `._SSH_GSSAPI` object will be returned. + :note: Check for the available API and return either an `._SSH_GSSAPI_OLD` + (MIT GSSAPI using python-gssapi package) object, an + `._SSH_GSSAPI_NEW` (MIT GSSAPI using gssapi package) object + or an `._SSH_SSPI` (MS SSPI) object. If there is no supported API available, ``None`` will be returned. """ if _API == "MIT": - return _SSH_GSSAPI(auth_method, gss_deleg_creds) + return _SSH_GSSAPI_OLD(auth_method, gss_deleg_creds) + elif _API == "PYTHON-GSSAPI-NEW": + return _SSH_GSSAPI_NEW(auth_method, gss_deleg_creds) elif _API == "SSPI" and os.name == "nt": return _SSH_SSPI(auth_method, gss_deleg_creds) else: @@ -94,9 +109,10 @@ def GSSAuth(auth_method, gss_deleg_creds=True): class _SSH_GSSAuth(object): """ - Contains the shared variables and methods of `._SSH_GSSAPI` and - `._SSH_SSPI`. + Contains the shared variables and methods of `._SSH_GSSAPI_OLD`, + `._SSH_GSSAPI_NEW` and `._SSH_SSPI`. """ + def __init__(self, auth_method, gss_deleg_creds): """ :param str auth_method: The name of the SSH authentication mechanism @@ -131,7 +147,6 @@ def set_service(self, service): as the only service value. :param str service: The desired SSH service - :rtype: Void """ if service.find("ssh-"): self._service = service @@ -142,7 +157,6 @@ def set_username(self, username): username is not set by C{ssh_init_sec_context}. :param str username: The name of the user who attempts to login - :rtype: Void """ self._username = username @@ -155,10 +169,12 @@ def ssh_gss_oids(self, mode="client"): :return: A byte sequence containing the number of supported OIDs, the length of the OID and the actual OID encoded with DER - :rtype: Bytes :note: In server mode we just return the OID length and the DER encoded OID. 
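
# Illustrative sketch (not part of the patch above): using the module-level
# availability flag exposed above before asking SSHClient for Kerberos
# authentication.  Hostname and account are placeholders, and a usable
# credential cache (kinit) or SSPI logon session is assumed.
import paramiko
from paramiko.ssh_gss import GSS_AUTH_AVAILABLE

client = paramiko.SSHClient()
client.load_system_host_keys()
if GSS_AUTH_AVAILABLE:
    # gssapi-with-mic authentication; gss_kex=True would also negotiate
    # GSS-API key exchange where the server supports it.
    client.connect("kerberized.example.com", username="demo", gss_auth=True)
else:
    # Fall back to agent/key-based auth when no GSS-API/SSPI backend exists.
    client.connect("kerberized.example.com", username="demo")
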
""" + from pyasn1.type.univ import ObjectIdentifier + from pyasn1.codec.der import encoder + OIDs = self._make_uint32(1) krb5_OID = encoder.encode(ObjectIdentifier(self._krb5_mech)) OID_len = self._make_uint32(len(krb5_OID)) @@ -172,22 +188,22 @@ def ssh_check_mech(self, desired_mech): :param str desired_mech: The desired GSS-API mechanism of the client :return: ``True`` if the given OID is supported, otherwise C{False} - :rtype: Boolean """ + from pyasn1.codec.der import decoder + mech, __ = decoder.decode(desired_mech) if mech.__str__() != self._krb5_mech: return False return True # Internals - #-------------------------------------------------------------------------- + # ------------------------------------------------------------------------- def _make_uint32(self, integer): """ Create a 32 bit unsigned integer (The byte sequence of an integer). :param int integer: The integer value to convert :return: The byte sequence of an 32 bit integer - :rtype: Bytes """ return struct.pack("!I", integer) @@ -207,11 +223,10 @@ def _ssh_build_mic(self, session_id, username, service, auth_method): string service (ssh-connection), string authentication-method (gssapi-with-mic or gssapi-keyex) - :rtype: Bytes """ mic = self._make_uint32(len(session_id)) mic += session_id - mic += struct.pack('B', MSG_USERAUTH_REQUEST) + mic += struct.pack("B", MSG_USERAUTH_REQUEST) mic += self._make_uint32(len(username)) mic += username.encode() mic += self._make_uint32(len(service)) @@ -221,12 +236,14 @@ def _ssh_build_mic(self, session_id, username, service, auth_method): return mic -class _SSH_GSSAPI(_SSH_GSSAuth): +class _SSH_GSSAPI_OLD(_SSH_GSSAuth): """ - Implementation of the GSS-API MIT Kerberos Authentication for SSH2. + Implementation of the GSS-API MIT Kerberos Authentication for SSH2, + using the older (unmaintained) python-gssapi package. :see: `.GSSAuth` """ + def __init__(self, auth_method, gss_deleg_creds): """ :param str auth_method: The name of the SSH authentication mechanism @@ -236,17 +253,22 @@ def __init__(self, auth_method, gss_deleg_creds): _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) if self._gss_deleg_creds: - self._gss_flags = (gssapi.C_PROT_READY_FLAG, - gssapi.C_INTEG_FLAG, - gssapi.C_MUTUAL_FLAG, - gssapi.C_DELEG_FLAG) + self._gss_flags = ( + gssapi.C_PROT_READY_FLAG, + gssapi.C_INTEG_FLAG, + gssapi.C_MUTUAL_FLAG, + gssapi.C_DELEG_FLAG, + ) else: - self._gss_flags = (gssapi.C_PROT_READY_FLAG, - gssapi.C_INTEG_FLAG, - gssapi.C_MUTUAL_FLAG) + self._gss_flags = ( + gssapi.C_PROT_READY_FLAG, + gssapi.C_INTEG_FLAG, + gssapi.C_MUTUAL_FLAG, + ) - def ssh_init_sec_context(self, target, desired_mech=None, - username=None, recv_token=None): + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): """ Initialize a GSS-API context. 
@@ -256,16 +278,19 @@ def ssh_init_sec_context(self, target, desired_mech=None, ("pseudo negotiated" mechanism, because we support just the krb5 mechanism :-)) :param str recv_token: The GSS-API token received from the Server - :raise SSHException: Is raised if the desired mechanism of the client - is not supported - :return: A ``String`` if the GSS-API has returned a token or ``None`` if - no token was returned - :rtype: String or None + :raises: + `.SSHException` -- Is raised if the desired mechanism of the client + is not supported + :return: A ``String`` if the GSS-API has returned a token or + ``None`` if no token was returned """ + from pyasn1.codec.der import decoder + self._username = username self._gss_host = target - targ_name = gssapi.Name("host@" + self._gss_host, - gssapi.C_NT_HOSTBASED_SERVICE) + targ_name = gssapi.Name( + "host@" + self._gss_host, gssapi.C_NT_HOSTBASED_SERVICE + ) ctx = gssapi.Context() ctx.flags = self._gss_flags if desired_mech is None: @@ -279,15 +304,17 @@ def ssh_init_sec_context(self, target, desired_mech=None, token = None try: if recv_token is None: - self._gss_ctxt = gssapi.InitContext(peer_name=targ_name, - mech_type=krb5_mech, - req_flags=ctx.flags) + self._gss_ctxt = gssapi.InitContext( + peer_name=targ_name, + mech_type=krb5_mech, + req_flags=ctx.flags, + ) token = self._gss_ctxt.step(token) else: token = self._gss_ctxt.step(recv_token) except gssapi.GSSException: - raise gssapi.GSSException("{0} Target: {1}".format(sys.exc_info()[1], - self._gss_host)) + message = "{} Target: {}".format(sys.exc_info()[1], self._gss_host) + raise gssapi.GSSException(message) self._gss_ctxt_status = self._gss_ctxt.established return token @@ -303,15 +330,15 @@ def ssh_get_mic(self, session_id, gss_kex=False): gssapi-keyex: Returns the MIC token from GSS-API with the SSH session ID as message. - :rtype: String - :see: `._ssh_build_mic` """ self._session_id = session_id if not gss_kex: - mic_field = self._ssh_build_mic(self._session_id, - self._username, - self._service, - self._auth_method) + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) mic_token = self._gss_ctxt.get_mic(mic_field) else: # for key exchange with gssapi-keyex @@ -328,7 +355,6 @@ def ssh_accept_sec_context(self, hostname, recv_token, username=None): if it's not the initial call. 
:return: A ``String`` if the GSS-API has returned a token or ``None`` if no token was returned - :rtype: String or None """ # hostname and username are not required for GSSAPI, but for SSPI self._gss_host = hostname @@ -347,22 +373,23 @@ def ssh_check_mic(self, mic_token, session_id, username=None): :param str session_id: The SSH session ID :param str username: The name of the user who attempts to login :return: None if the MIC check was successful - :raises gssapi.GSSException: if the MIC check failed + :raises: ``gssapi.GSSException`` -- if the MIC check failed """ self._session_id = session_id self._username = username if self._username is not None: # server mode - mic_field = self._ssh_build_mic(self._session_id, - self._username, - self._service, - self._auth_method) + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) self._gss_srv_ctxt.verify_mic(mic_field, mic_token) else: # for key exchange with gssapi-keyex # client mode - self._gss_ctxt.verify_mic(self._session_id, - mic_token) + self._gss_ctxt.verify_mic(self._session_id, mic_token) @property def credentials_delegated(self): @@ -370,7 +397,6 @@ def credentials_delegated(self): Checks if credentials are delegated (server mode). :return: ``True`` if credentials are delegated, otherwise ``False`` - :rtype: bool """ if self._gss_srv_ctxt.delegated_cred is not None: return True @@ -383,8 +409,189 @@ def save_client_creds(self, client_token): (server mode). :param str client_token: The GSS-API token received form the client - :raise NotImplementedError: Credential delegation is currently not - supported in server mode + :raises: + ``NotImplementedError`` -- Credential delegation is currently not + supported in server mode + """ + raise NotImplementedError + + +if __version_info__ < (2, 5): + # provide the old name for strict backward compatibility + _SSH_GSSAPI = _SSH_GSSAPI_OLD + + +class _SSH_GSSAPI_NEW(_SSH_GSSAuth): + """ + Implementation of the GSS-API MIT Kerberos Authentication for SSH2, + using the newer, currently maintained gssapi package. + + :see: `.GSSAuth` + """ + + def __init__(self, auth_method, gss_deleg_creds): + """ + :param str auth_method: The name of the SSH authentication mechanism + (gssapi-with-mic or gss-keyex) + :param bool gss_deleg_creds: Delegate client credentials or not + """ + _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) + + if self._gss_deleg_creds: + self._gss_flags = ( + gssapi.RequirementFlag.protection_ready, + gssapi.RequirementFlag.integrity, + gssapi.RequirementFlag.mutual_authentication, + gssapi.RequirementFlag.delegate_to_peer, + ) + else: + self._gss_flags = ( + gssapi.RequirementFlag.protection_ready, + gssapi.RequirementFlag.integrity, + gssapi.RequirementFlag.mutual_authentication, + ) + + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): + """ + Initialize a GSS-API context. 
+ + :param str username: The name of the user who attempts to login + :param str target: The hostname of the target to connect to + :param str desired_mech: The negotiated GSS-API mechanism + ("pseudo negotiated" mechanism, because we + support just the krb5 mechanism :-)) + :param str recv_token: The GSS-API token received from the Server + :raises: `.SSHException` -- Is raised if the desired mechanism of the + client is not supported + :raises: ``gssapi.exceptions.GSSError`` if there is an error signaled + by the GSS-API implementation + :return: A ``String`` if the GSS-API has returned a token or ``None`` + if no token was returned + """ + from pyasn1.codec.der import decoder + + self._username = username + self._gss_host = target + targ_name = gssapi.Name( + "host@" + self._gss_host, + name_type=gssapi.NameType.hostbased_service, + ) + if desired_mech is not None: + mech, __ = decoder.decode(desired_mech) + if mech.__str__() != self._krb5_mech: + raise SSHException("Unsupported mechanism OID.") + krb5_mech = gssapi.MechType.kerberos + token = None + if recv_token is None: + self._gss_ctxt = gssapi.SecurityContext( + name=targ_name, + flags=self._gss_flags, + mech=krb5_mech, + usage="initiate", + ) + token = self._gss_ctxt.step(token) + else: + token = self._gss_ctxt.step(recv_token) + self._gss_ctxt_status = self._gss_ctxt.complete + return token + + def ssh_get_mic(self, session_id, gss_kex=False): + """ + Create the MIC token for a SSH2 message. + + :param str session_id: The SSH session ID + :param bool gss_kex: Generate the MIC for GSS-API Key Exchange or not + :return: gssapi-with-mic: + Returns the MIC token from GSS-API for the message we created + with ``_ssh_build_mic``. + gssapi-keyex: + Returns the MIC token from GSS-API with the SSH session ID as + message. + :rtype: str + """ + self._session_id = session_id + if not gss_kex: + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + mic_token = self._gss_ctxt.get_signature(mic_field) + else: + # for key exchange with gssapi-keyex + mic_token = self._gss_srv_ctxt.get_signature(self._session_id) + return mic_token + + def ssh_accept_sec_context(self, hostname, recv_token, username=None): + """ + Accept a GSS-API context (server mode). + + :param str hostname: The servers hostname + :param str username: The name of the user who attempts to login + :param str recv_token: The GSS-API Token received from the server, + if it's not the initial call. + :return: A ``String`` if the GSS-API has returned a token or ``None`` + if no token was returned + """ + # hostname and username are not required for GSSAPI, but for SSPI + self._gss_host = hostname + self._username = username + if self._gss_srv_ctxt is None: + self._gss_srv_ctxt = gssapi.SecurityContext(usage="accept") + token = self._gss_srv_ctxt.step(recv_token) + self._gss_srv_ctxt_status = self._gss_srv_ctxt.complete + return token + + def ssh_check_mic(self, mic_token, session_id, username=None): + """ + Verify the MIC token for a SSH2 message. 
+ + :param str mic_token: The MIC token received from the client + :param str session_id: The SSH session ID + :param str username: The name of the user who attempts to login + :return: None if the MIC check was successful + :raises: ``gssapi.exceptions.GSSError`` -- if the MIC check failed + """ + self._session_id = session_id + self._username = username + if self._username is not None: + # server mode + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + self._gss_srv_ctxt.verify_signature(mic_field, mic_token) + else: + # for key exchange with gssapi-keyex + # client mode + self._gss_ctxt.verify_signature(self._session_id, mic_token) + + @property + def credentials_delegated(self): + """ + Checks if credentials are delegated (server mode). + + :return: ``True`` if credentials are delegated, otherwise ``False`` + :rtype: bool + """ + if self._gss_srv_ctxt.delegated_creds is not None: + return True + return False + + def save_client_creds(self, client_token): + """ + Save the Client token in a file. This is used by the SSH server + to store the client credentials if credentials are delegated + (server mode). + + :param str client_token: The GSS-API token received form the client + :raises: ``NotImplementedError`` -- Credential delegation is currently + not supported in server mode """ raise NotImplementedError @@ -395,6 +602,7 @@ class _SSH_SSPI(_SSH_GSSAuth): :see: `.GSSAuth` """ + def __init__(self, auth_method, gss_deleg_creds): """ :param str auth_method: The name of the SSH authentication mechanism @@ -404,15 +612,19 @@ def __init__(self, auth_method, gss_deleg_creds): _SSH_GSSAuth.__init__(self, auth_method, gss_deleg_creds) if self._gss_deleg_creds: - self._gss_flags = sspicon.ISC_REQ_INTEGRITY |\ - sspicon.ISC_REQ_MUTUAL_AUTH |\ - sspicon.ISC_REQ_DELEGATE + self._gss_flags = ( + sspicon.ISC_REQ_INTEGRITY + | sspicon.ISC_REQ_MUTUAL_AUTH + | sspicon.ISC_REQ_DELEGATE + ) else: - self._gss_flags = sspicon.ISC_REQ_INTEGRITY |\ - sspicon.ISC_REQ_MUTUAL_AUTH + self._gss_flags = ( + sspicon.ISC_REQ_INTEGRITY | sspicon.ISC_REQ_MUTUAL_AUTH + ) - def ssh_init_sec_context(self, target, desired_mech=None, - username=None, recv_token=None): + def ssh_init_sec_context( + self, target, desired_mech=None, username=None, recv_token=None + ): """ Initialize a SSPI context. 
@@ -422,12 +634,14 @@ def ssh_init_sec_context(self, target, desired_mech=None, ("pseudo negotiated" mechanism, because we support just the krb5 mechanism :-)) :param recv_token: The SSPI token received from the Server - :raise SSHException: Is raised if the desired mechanism of the client - is not supported + :raises: + `.SSHException` -- Is raised if the desired mechanism of the client + is not supported :return: A ``String`` if the SSPI has returned a token or ``None`` if no token was returned - :rtype: String or None """ + from pyasn1.codec.der import decoder + self._username = username self._gss_host = target error = 0 @@ -438,14 +652,15 @@ def ssh_init_sec_context(self, target, desired_mech=None, raise SSHException("Unsupported mechanism OID.") try: if recv_token is None: - self._gss_ctxt = sspi.ClientAuth("Kerberos", - scflags=self._gss_flags, - targetspn=targ_name) + self._gss_ctxt = sspi.ClientAuth( + "Kerberos", scflags=self._gss_flags, targetspn=targ_name + ) error, token = self._gss_ctxt.authorize(recv_token) token = token[0].Buffer - except: - raise Exception("{0}, Target: {1}".format(sys.exc_info()[1], - self._gss_host)) + except pywintypes.error as e: + e.strerror += ", Target: {}".format(self._gss_host) + raise + if error == 0: """ if the status is GSS_COMPLETE (error = 0) the context is fully @@ -471,15 +686,15 @@ def ssh_get_mic(self, session_id, gss_kex=False): gssapi-keyex: Returns the MIC token from SSPI with the SSH session ID as message. - :rtype: String - :see: `._ssh_build_mic` """ self._session_id = session_id if not gss_kex: - mic_field = self._ssh_build_mic(self._session_id, - self._username, - self._service, - self._auth_method) + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) mic_token = self._gss_ctxt.sign(mic_field) else: # for key exchange with gssapi-keyex @@ -496,7 +711,6 @@ def ssh_accept_sec_context(self, hostname, username, recv_token): if it's not the initial call. :return: A ``String`` if the SSPI has returned a token or ``None`` if no token was returned - :rtype: String or None """ self._gss_host = hostname self._username = username @@ -517,23 +731,25 @@ def ssh_check_mic(self, mic_token, session_id, username=None): :param str session_id: The SSH session ID :param str username: The name of the user who attempts to login :return: None if the MIC check was successful - :raises sspi.error: if the MIC check failed + :raises: ``sspi.error`` -- if the MIC check failed """ self._session_id = session_id self._username = username if username is not None: # server mode - mic_field = self._ssh_build_mic(self._session_id, - self._username, - self._service, - self._auth_method) - # Verifies data and its signature. If verification fails, an + mic_field = self._ssh_build_mic( + self._session_id, + self._username, + self._service, + self._auth_method, + ) + # Verifies data and its signature. If verification fails, an # sspi.error will be raised. self._gss_srv_ctxt.verify(mic_field, mic_token) else: # for key exchange with gssapi-keyex # client mode - # Verifies data and its signature. If verification fails, an + # Verifies data and its signature. If verification fails, an # sspi.error will be raised. self._gss_ctxt.verify(self._session_id, mic_token) @@ -543,13 +759,10 @@ def credentials_delegated(self): Checks if credentials are delegated (server mode). 
:return: ``True`` if credentials are delegated, otherwise ``False`` - :rtype: Boolean """ - return ( - self._gss_flags & sspicon.ISC_REQ_DELEGATE - ) and ( - self._gss_srv_ctxt_status or (self._gss_flags) - ) + return self._gss_flags & sspicon.ISC_REQ_DELEGATE and ( + self._gss_srv_ctxt_status or self._gss_flags + ) def save_client_creds(self, client_token): """ @@ -558,7 +771,8 @@ def save_client_creds(self, client_token): (server mode). :param str client_token: The SSPI token received form the client - :raise NotImplementedError: Credential delegation is currently not - supported in server mode + :raises: + ``NotImplementedError`` -- Credential delegation is currently not + supported in server mode """ raise NotImplementedError diff --git a/paramiko/transport.py b/paramiko/transport.py index b52d31580..b99b3278d 100644 --- a/paramiko/transport.py +++ b/paramiko/transport.py @@ -1,4 +1,5 @@ # Copyright (C) 2003-2007 Robey Pointer +# Copyright (C) 2003-2007 Robey Pointer # # This file is part of paramiko. # @@ -37,24 +38,65 @@ from paramiko.auth_handler import AuthHandler from paramiko.ssh_gss import GSSAuth from paramiko.channel import Channel -from paramiko.common import xffffffff, cMSG_CHANNEL_OPEN, cMSG_IGNORE, \ - cMSG_GLOBAL_REQUEST, DEBUG, MSG_KEXINIT, MSG_IGNORE, MSG_DISCONNECT, \ - MSG_DEBUG, ERROR, WARNING, cMSG_UNIMPLEMENTED, INFO, cMSG_KEXINIT, \ - cMSG_NEWKEYS, MSG_NEWKEYS, cMSG_REQUEST_SUCCESS, cMSG_REQUEST_FAILURE, \ - CONNECTION_FAILED_CODE, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, \ - OPEN_SUCCEEDED, cMSG_CHANNEL_OPEN_FAILURE, cMSG_CHANNEL_OPEN_SUCCESS, \ - MSG_GLOBAL_REQUEST, MSG_REQUEST_SUCCESS, MSG_REQUEST_FAILURE, \ - MSG_CHANNEL_OPEN_SUCCESS, MSG_CHANNEL_OPEN_FAILURE, MSG_CHANNEL_OPEN, \ - MSG_CHANNEL_SUCCESS, MSG_CHANNEL_FAILURE, MSG_CHANNEL_DATA, \ - MSG_CHANNEL_EXTENDED_DATA, MSG_CHANNEL_WINDOW_ADJUST, MSG_CHANNEL_REQUEST, \ - MSG_CHANNEL_EOF, MSG_CHANNEL_CLOSE, MIN_WINDOW_SIZE, MIN_PACKET_SIZE, \ - MAX_WINDOW_SIZE, DEFAULT_WINDOW_SIZE, DEFAULT_MAX_PACKET_SIZE +from paramiko.common import ( + xffffffff, + cMSG_CHANNEL_OPEN, + cMSG_IGNORE, + cMSG_GLOBAL_REQUEST, + DEBUG, + MSG_KEXINIT, + MSG_IGNORE, + MSG_DISCONNECT, + MSG_DEBUG, + ERROR, + WARNING, + cMSG_UNIMPLEMENTED, + INFO, + cMSG_KEXINIT, + cMSG_NEWKEYS, + MSG_NEWKEYS, + cMSG_REQUEST_SUCCESS, + cMSG_REQUEST_FAILURE, + CONNECTION_FAILED_CODE, + OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, + OPEN_SUCCEEDED, + cMSG_CHANNEL_OPEN_FAILURE, + cMSG_CHANNEL_OPEN_SUCCESS, + MSG_GLOBAL_REQUEST, + MSG_REQUEST_SUCCESS, + MSG_REQUEST_FAILURE, + MSG_CHANNEL_OPEN_SUCCESS, + MSG_CHANNEL_OPEN_FAILURE, + MSG_CHANNEL_OPEN, + MSG_CHANNEL_SUCCESS, + MSG_CHANNEL_FAILURE, + MSG_CHANNEL_DATA, + MSG_CHANNEL_EXTENDED_DATA, + MSG_CHANNEL_WINDOW_ADJUST, + MSG_CHANNEL_REQUEST, + MSG_CHANNEL_EOF, + MSG_CHANNEL_CLOSE, + MIN_WINDOW_SIZE, + MIN_PACKET_SIZE, + MAX_WINDOW_SIZE, + DEFAULT_WINDOW_SIZE, + DEFAULT_MAX_PACKET_SIZE, + HIGHEST_USERAUTH_MESSAGE_ID, + MSG_UNIMPLEMENTED, + MSG_NAMES, + MSG_EXT_INFO, + cMSG_EXT_INFO, +) from paramiko.compress import ZlibCompressor, ZlibDecompressor from paramiko.dsskey import DSSKey +from paramiko.ed25519key import Ed25519Key +from paramiko.kex_curve25519 import KexCurve25519 from paramiko.kex_gex import KexGex, KexGexSHA256 from paramiko.kex_group1 import KexGroup1 -from paramiko.kex_group14 import KexGroup14 -from paramiko.kex_gss import KexGSSGex, KexGSSGroup1, KexGSSGroup14, NullHostKey +from paramiko.kex_group14 import KexGroup14, KexGroup14SHA256 +from paramiko.kex_group16 import KexGroup16SHA512 +from 
paramiko.kex_ecdh_nist import KexNistp256, KexNistp384, KexNistp521 +from paramiko.kex_gss import KexGSSGex, KexGSSGroup1, KexGSSGroup14 from paramiko.message import Message from paramiko.packet import Packetizer, NeedRekeyException from paramiko.primes import ModulusPack @@ -63,24 +105,31 @@ from paramiko.ecdsakey import ECDSAKey from paramiko.server import ServerInterface from paramiko.sftp_client import SFTPClient -from paramiko.ssh_exception import (SSHException, BadAuthenticationType, - ChannelException, ProxyCommandFailure) +from paramiko.ssh_exception import ( + SSHException, + BadAuthenticationType, + ChannelException, + IncompatiblePeer, + ProxyCommandFailure, +) from paramiko.util import retry_on_signal, ClosingContextManager, clamp_value - # for thread cleanup _active_threads = [] + def _join_lingering_threads(): for thr in _active_threads: thr.stop_thread() + import atexit + atexit.register(_join_lingering_threads) -class Transport (threading.Thread, ClosingContextManager): +class Transport(threading.Thread, ClosingContextManager): """ An SSH Transport attaches to a stream (usually a socket), negotiates an encrypted session, authenticates, and then creates stream tunnels, called @@ -90,156 +139,208 @@ class Transport (threading.Thread, ClosingContextManager): Instances of this class may be used as context managers. """ + _ENCRYPT = object() _DECRYPT = object() - _PROTO_ID = '2.0' - _CLIENT_ID = 'paramiko_%s' % paramiko.__version__ + _PROTO_ID = "2.0" + _CLIENT_ID = "paramiko_{}".format(paramiko.__version__) # These tuples of algorithm identifiers are in preference order; do not # reorder without reason! + # NOTE: if you need to modify these, we suggest leveraging the + # `disabled_algorithms` constructor argument (also available in SSHClient) + # instead of monkeypatching or subclassing. 
_preferred_ciphers = ( - 'aes128-ctr', - 'aes192-ctr', - 'aes256-ctr', - 'aes128-cbc', - 'blowfish-cbc', - 'aes192-cbc', - 'aes256-cbc', - '3des-cbc', - 'arcfour128', - 'arcfour256', + "aes128-ctr", + "aes192-ctr", + "aes256-ctr", + "aes128-cbc", + "aes192-cbc", + "aes256-cbc", + "blowfish-cbc", + "3des-cbc", ) _preferred_macs = ( - 'hmac-sha2-256', - 'hmac-sha2-512', - 'hmac-md5', - 'hmac-sha1-96', - 'hmac-md5-96', - 'hmac-sha1', + "hmac-sha2-256", + "hmac-sha2-512", + "hmac-sha2-256-etm@openssh.com", + "hmac-sha2-512-etm@openssh.com", + "hmac-sha1", + "hmac-md5", + "hmac-sha1-96", + "hmac-md5-96", ) + # ~= HostKeyAlgorithms in OpenSSH land _preferred_keys = ( - 'ssh-rsa', - 'ssh-dss', - ) + tuple(ECDSAKey.supported_key_format_identifiers()) - _preferred_kex = ( - 'diffie-hellman-group1-sha1', - 'diffie-hellman-group14-sha1', - 'diffie-hellman-group-exchange-sha1', - 'diffie-hellman-group-exchange-sha256', + "ssh-ed25519", + "ecdsa-sha2-nistp256", + "ecdsa-sha2-nistp384", + "ecdsa-sha2-nistp521", + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + "ssh-dss", + ) + # ~= PubKeyAcceptedAlgorithms + _preferred_pubkeys = ( + "ssh-ed25519", + "ecdsa-sha2-nistp256", + "ecdsa-sha2-nistp384", + "ecdsa-sha2-nistp521", + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + "ssh-dss", + ) + _preferred_kex = ( + "ecdh-sha2-nistp256", + "ecdh-sha2-nistp384", + "ecdh-sha2-nistp521", + "diffie-hellman-group16-sha512", + "diffie-hellman-group-exchange-sha256", + "diffie-hellman-group14-sha256", + "diffie-hellman-group-exchange-sha1", + "diffie-hellman-group14-sha1", + "diffie-hellman-group1-sha1", + ) + if KexCurve25519.is_available(): + _preferred_kex = ("curve25519-sha256@libssh.org",) + _preferred_kex + _preferred_gsskex = ( + "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==", + "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==", + "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==", ) - _preferred_compression = ('none',) + _preferred_compression = ("none",) _cipher_info = { - 'aes128-ctr': { - 'class': algorithms.AES, - 'mode': modes.CTR, - 'block-size': 16, - 'key-size': 16 + "aes128-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 16, }, - 'aes192-ctr': { - 'class': algorithms.AES, - 'mode': modes.CTR, - 'block-size': 16, - 'key-size': 24 + "aes192-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 24, }, - 'aes256-ctr': { - 'class': algorithms.AES, - 'mode': modes.CTR, - 'block-size': 16, - 'key-size': 32 + "aes256-ctr": { + "class": algorithms.AES, + "mode": modes.CTR, + "block-size": 16, + "key-size": 32, }, - 'blowfish-cbc': { - 'class': algorithms.Blowfish, - 'mode': modes.CBC, - 'block-size': 8, - 'key-size': 16 + "blowfish-cbc": { + "class": algorithms.Blowfish, + "mode": modes.CBC, + "block-size": 8, + "key-size": 16, }, - 'aes128-cbc': { - 'class': algorithms.AES, - 'mode': modes.CBC, - 'block-size': 16, - 'key-size': 16 + "aes128-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 16, }, - 'aes192-cbc': { - 'class': algorithms.AES, - 'mode': modes.CBC, - 'block-size': 16, - 'key-size': 24 + "aes192-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 24, }, - 'aes256-cbc': { - 'class': algorithms.AES, - 'mode': modes.CBC, - 'block-size': 16, - 'key-size': 32 + "aes256-cbc": { + "class": algorithms.AES, + "mode": modes.CBC, + "block-size": 16, + "key-size": 32, }, - '3des-cbc': { - 'class': algorithms.TripleDES, - 'mode': modes.CBC, - 'block-size': 8, - 'key-size': 24 
- }, - 'arcfour128': { - 'class': algorithms.ARC4, - 'mode': None, - 'block-size': 8, - 'key-size': 16 - }, - 'arcfour256': { - 'class': algorithms.ARC4, - 'mode': None, - 'block-size': 8, - 'key-size': 32 + "3des-cbc": { + "class": algorithms.TripleDES, + "mode": modes.CBC, + "block-size": 8, + "key-size": 24, }, } - _mac_info = { - 'hmac-sha1': {'class': sha1, 'size': 20}, - 'hmac-sha1-96': {'class': sha1, 'size': 12}, - 'hmac-sha2-256': {'class': sha256, 'size': 32}, - 'hmac-sha2-512': {'class': sha512, 'size': 64}, - 'hmac-md5': {'class': md5, 'size': 16}, - 'hmac-md5-96': {'class': md5, 'size': 12}, + "hmac-sha1": {"class": sha1, "size": 20}, + "hmac-sha1-96": {"class": sha1, "size": 12}, + "hmac-sha2-256": {"class": sha256, "size": 32}, + "hmac-sha2-256-etm@openssh.com": {"class": sha256, "size": 32}, + "hmac-sha2-512": {"class": sha512, "size": 64}, + "hmac-sha2-512-etm@openssh.com": {"class": sha512, "size": 64}, + "hmac-md5": {"class": md5, "size": 16}, + "hmac-md5-96": {"class": md5, "size": 12}, } _key_info = { - 'ssh-rsa': RSAKey, - 'ssh-dss': DSSKey, - 'ecdsa-sha2-nistp256': ECDSAKey, + # TODO: at some point we will want to drop this as it's no longer + # considered secure due to using SHA-1 for signatures. OpenSSH 8.8 no + # longer supports it. Question becomes at what point do we want to + # prevent users with older setups from using this? + "ssh-rsa": RSAKey, + "ssh-rsa-cert-v01@openssh.com": RSAKey, + "rsa-sha2-256": RSAKey, + "rsa-sha2-256-cert-v01@openssh.com": RSAKey, + "rsa-sha2-512": RSAKey, + "rsa-sha2-512-cert-v01@openssh.com": RSAKey, + "ssh-dss": DSSKey, + "ssh-dss-cert-v01@openssh.com": DSSKey, + "ecdsa-sha2-nistp256": ECDSAKey, + "ecdsa-sha2-nistp256-cert-v01@openssh.com": ECDSAKey, + "ecdsa-sha2-nistp384": ECDSAKey, + "ecdsa-sha2-nistp384-cert-v01@openssh.com": ECDSAKey, + "ecdsa-sha2-nistp521": ECDSAKey, + "ecdsa-sha2-nistp521-cert-v01@openssh.com": ECDSAKey, + "ssh-ed25519": Ed25519Key, + "ssh-ed25519-cert-v01@openssh.com": Ed25519Key, } _kex_info = { - 'diffie-hellman-group1-sha1': KexGroup1, - 'diffie-hellman-group14-sha1': KexGroup14, - 'diffie-hellman-group-exchange-sha1': KexGex, - 'diffie-hellman-group-exchange-sha256': KexGexSHA256, - 'gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGroup1, - 'gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGroup14, - 'gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==': KexGSSGex + "diffie-hellman-group1-sha1": KexGroup1, + "diffie-hellman-group14-sha1": KexGroup14, + "diffie-hellman-group-exchange-sha1": KexGex, + "diffie-hellman-group-exchange-sha256": KexGexSHA256, + "diffie-hellman-group14-sha256": KexGroup14SHA256, + "diffie-hellman-group16-sha512": KexGroup16SHA512, + "gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGroup1, + "gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGroup14, + "gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==": KexGSSGex, + "ecdh-sha2-nistp256": KexNistp256, + "ecdh-sha2-nistp384": KexNistp384, + "ecdh-sha2-nistp521": KexNistp521, } + if KexCurve25519.is_available(): + _kex_info["curve25519-sha256@libssh.org"] = KexCurve25519 _compression_info = { # zlib@openssh.com is just zlib, but only turned on after a successful # authentication. openssh servers may only offer this type because # they've had troubles with security holes in zlib in the past. 
- 'zlib@openssh.com': (ZlibCompressor, ZlibDecompressor), - 'zlib': (ZlibCompressor, ZlibDecompressor), - 'none': (None, None), + "zlib@openssh.com": (ZlibCompressor, ZlibDecompressor), + "zlib": (ZlibCompressor, ZlibDecompressor), + "none": (None, None), } _modulus_pack = None - - def __init__(self, - sock, - default_window_size=DEFAULT_WINDOW_SIZE, - default_max_packet_size=DEFAULT_MAX_PACKET_SIZE, - gss_kex=False, - gss_deleg_creds=True): + _active_check_timeout = 0.1 + + def __init__( + self, + sock, + default_window_size=DEFAULT_WINDOW_SIZE, + default_max_packet_size=DEFAULT_MAX_PACKET_SIZE, + gss_kex=False, + gss_deleg_creds=True, + disabled_algorithms=None, + server_sig_algs=True, + ): """ Create a new SSH session over an existing socket, or socket-like - object. This only creates the `.Transport` object; it doesn't begin the - SSH session yet. Use `connect` or `start_client` to begin a client + object. This only creates the `.Transport` object; it doesn't begin + the SSH session yet. Use `connect` or `start_client` to begin a client session, or `start_server` to begin a server session. If the object is not actually a socket, it must have the following @@ -275,16 +376,51 @@ def __init__(self, :param int default_max_packet_size: sets the default max packet size on the transport. (defaults to 32768) + :param bool gss_kex: + Whether to enable GSSAPI key exchange when GSSAPI is in play. + Default: ``False``. + :param bool gss_deleg_creds: + Whether to enable GSSAPI credential delegation when GSSAPI is in + play. Default: ``True``. + :param dict disabled_algorithms: + If given, must be a dictionary mapping algorithm type to an + iterable of algorithm identifiers, which will be disabled for the + lifetime of the transport. + + Keys should match the last word in the class' builtin algorithm + tuple attributes, such as ``"ciphers"`` to disable names within + ``_preferred_ciphers``; or ``"kex"`` to disable something defined + inside ``_preferred_kex``. Values should exactly match members of + the matching attribute. + + For example, if you need to disable + ``diffie-hellman-group16-sha512`` key exchange (perhaps because + your code talks to a server which implements it differently from + Paramiko), specify ``disabled_algorithms={"kex": + ["diffie-hellman-group16-sha512"]}``. + :param bool server_sig_algs: + Whether to send an extra message to compatible clients, in server + mode, with a list of supported pubkey algorithms. Default: + ``True``. .. versionchanged:: 1.15 Added the ``default_window_size`` and ``default_max_packet_size`` arguments. + .. versionchanged:: 1.15 + Added the ``gss_kex`` and ``gss_deleg_creds`` kwargs. + .. versionchanged:: 2.6 + Added the ``disabled_algorithms`` kwarg. + .. versionchanged:: 2.9 + Added the ``server_sig_algs`` kwarg. 
""" self.active = False + self.hostname = None + self.server_extensions = {} if isinstance(sock, string_types): # convert "host:port" into (host, port) - hl = sock.split(':', 1) + hl = sock.split(":", 1) + self.hostname = hl[0] if len(hl) == 1: sock = (hl[0], 22) else: @@ -292,11 +428,15 @@ def __init__(self, if type(sock) is tuple: # connect to the given (host, port) hostname, port = sock - reason = 'No suitable address family' - for (family, socktype, proto, canonname, sockaddr) in socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM): + self.hostname = hostname + reason = "No suitable address family" + addrinfos = socket.getaddrinfo( + hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM + ) + for family, socktype, proto, canonname, sockaddr in addrinfos: if socktype == socket.SOCK_STREAM: af = family - addr = sockaddr + # addr = sockaddr sock = socket.socket(af, socket.SOCK_STREAM) try: retry_on_signal(lambda: sock.connect((hostname, port))) @@ -306,25 +446,21 @@ def __init__(self, break else: raise SSHException( - 'Unable to connect to %s: %s' % (hostname, reason)) + "Unable to connect to {}: {}".format(hostname, reason) + ) # okay, normal socket-ish flow here... threading.Thread.__init__(self) self.setDaemon(True) self.sock = sock - # Python < 2.3 doesn't have the settimeout method - RogerB - try: - # we set the timeout so we can check self.active periodically to - # see if we should bail. socket.timeout exception is never - # propagated. - self.sock.settimeout(0.1) - except AttributeError: - pass + # we set the timeout so we can check self.active periodically to + # see if we should bail. socket.timeout exception is never propagated. + self.sock.settimeout(self._active_check_timeout) # negotiated crypto parameters self.packetizer = Packetizer(sock) - self.local_version = 'SSH-' + self._PROTO_ID + '-' + self._CLIENT_ID - self.remote_version = '' - self.local_cipher = self.remote_cipher = '' + self.local_version = "SSH-" + self._PROTO_ID + "-" + self._CLIENT_ID + self.remote_version = "" + self.local_cipher = self.remote_cipher = "" self.local_kex_init = self.remote_kex_init = None self.local_mac = self.remote_mac = None self.local_compression = self.remote_compression = None @@ -340,12 +476,7 @@ def __init__(self, self.gss_host = None if self.use_gss_kex: self.kexgss_ctxt = GSSAuth("gssapi-keyex", gss_deleg_creds) - self._preferred_kex = ('gss-gex-sha1-toWM5Slw5Ew8Mqkay+al2g==', - 'gss-group14-sha1-toWM5Slw5Ew8Mqkay+al2g==', - 'gss-group1-sha1-toWM5Slw5Ew8Mqkay+al2g==', - 'diffie-hellman-group-exchange-sha1', - 'diffie-hellman-group14-sha1', - 'diffie-hellman-group1-sha1') + self._preferred_kex = self._preferred_gsskex + self._preferred_kex # state used during negotiation self.kex_engine = None @@ -356,12 +487,13 @@ def __init__(self, self.in_kex = False self.authenticated = False self._expected_packet = tuple() - self.lock = threading.Lock() # synchronization (always higher level than write_lock) + # synchronization (always higher level than write_lock) + self.lock = threading.Lock() # tracking open channels self._channels = ChannelMap() - self.channel_events = {} # (id -> Event) - self.channels_seen = {} # (id -> True) + self.channel_events = {} # (id -> Event) + self.channels_seen = {} # (id -> True) self._channel_counter = 0 self.default_max_packet_size = default_max_packet_size self.default_window_size = default_window_size @@ -373,15 +505,23 @@ def __init__(self, self.clear_to_send = threading.Event() self.clear_to_send_lock = threading.Lock() 
self.clear_to_send_timeout = 30.0 - self.log_name = 'paramiko.transport' + self.log_name = "paramiko.transport" self.logger = util.get_logger(self.log_name) self.packetizer.set_log(self.logger) self.auth_handler = None - self.global_response = None # response Message from an arbitrary global request - self.completion_event = None # user-defined event callbacks - self.banner_timeout = 15 # how long (seconds) to wait for the SSH banner - self.handshake_timeout = 15 # how long (seconds) to wait for the handshake to finish after SSH banner sent. - + # response Message from an arbitrary global request + self.global_response = None + # user-defined event callbacks + self.completion_event = None + # how long (seconds) to wait for the SSH banner + self.banner_timeout = 15 + # how long (seconds) to wait for the handshake to finish after SSH + # banner sent. + self.handshake_timeout = 15 + # how long (seconds) to wait for the auth response. + self.auth_timeout = 30 + self.disabled_algorithms = disabled_algorithms or {} + self.server_sig_algs = server_sig_algs # server mode: self.server_mode = False @@ -391,24 +531,61 @@ def __init__(self, self.server_accept_cv = threading.Condition(self.lock) self.subsystem_table = {} + def _filter_algorithm(self, type_): + default = getattr(self, "_preferred_{}".format(type_)) + return tuple( + x + for x in default + if x not in self.disabled_algorithms.get(type_, []) + ) + + @property + def preferred_ciphers(self): + return self._filter_algorithm("ciphers") + + @property + def preferred_macs(self): + return self._filter_algorithm("macs") + + @property + def preferred_keys(self): + return self._filter_algorithm("keys") + + @property + def preferred_pubkeys(self): + return self._filter_algorithm("pubkeys") + + @property + def preferred_kex(self): + return self._filter_algorithm("kex") + + @property + def preferred_compression(self): + return self._filter_algorithm("compression") + def __repr__(self): """ Returns a string representation of this object, for debugging. """ - out = '= max_time + ): break def start_server(self, event=None, server=None): @@ -506,7 +713,7 @@ def start_server(self, event=None, server=None): be triggered. On failure, `is_active` will return ``False``. (Since 1.4) If ``event`` is ``None``, this method will not return until - negotation is done. On success, the method returns normally. + negotiation is done. On success, the method returns normally. Otherwise an SSHException is raised. After a successful negotiation, the client will need to authenticate. @@ -533,8 +740,9 @@ def start_server(self, event=None, server=None): an object used to perform authentication and create `channels <.Channel>` - :raises SSHException: if negotiation fails (and no ``event`` was passed - in) + :raises: + `.SSHException` -- if negotiation fails (and no ``event`` was + passed in) """ if server is None: server = ServerInterface() @@ -556,7 +764,7 @@ def start_server(self, event=None, server=None): e = self.get_exception() if e is not None: raise e - raise SSHException('Negotiation failed.') + raise SSHException("Negotiation failed.") if event.is_set(): break @@ -573,6 +781,12 @@ def add_server_key(self, key): the host key to add, usually an `.RSAKey` or `.DSSKey`. """ self.server_key_dict[key.get_name()] = key + # Handle SHA-2 extensions for RSA by ensuring that lookups into + # self.server_key_dict will yield this key for any of the algorithm + # names. 
+ if isinstance(key, RSAKey): + self.server_key_dict["rsa-sha2-256"] = key + self.server_key_dict["rsa-sha2-512"] = key def get_server_key(self): """ @@ -623,7 +837,7 @@ def load_server_moduli(filename=None): """ Transport._modulus_pack = ModulusPack() # places to look for the openssh "moduli" file - file_list = ['/etc/ssh/moduli', '/usr/local/etc/moduli'] + file_list = ["/etc/ssh/moduli", "/usr/local/etc/moduli"] if filename is not None: file_list.insert(0, filename) for fn in file_list: @@ -656,12 +870,12 @@ def get_remote_server_key(self): string)``. You can get the same effect by calling `.PKey.get_name` for the key type, and ``str(key)`` for the key string. - :raises SSHException: if no session is currently active. + :raises: `.SSHException` -- if no session is currently active. :return: public key (`.PKey`) of the remote server """ if (not self.active) or (not self.initial_kex_done): - raise SSHException('No existing session') + raise SSHException("No existing session") return self.host_key def is_active(self): @@ -674,7 +888,9 @@ def is_active(self): """ return self.active - def open_session(self, window_size=None, max_packet_size=None, timeout=None): + def open_session( + self, window_size=None, max_packet_size=None, timeout=None + ): """ Request a new channel to the server, of type ``"session"``. This is just an alias for calling `open_channel` with an argument of @@ -691,7 +907,8 @@ def open_session(self, window_size=None, max_packet_size=None, timeout=None): :return: a new `.Channel` - :raises SSHException: if the request is rejected or the session ends + :raises: + `.SSHException` -- if the request is rejected or the session ends prematurely .. versionchanged:: 1.13.4/1.14.3/1.15.3 @@ -699,10 +916,12 @@ def open_session(self, window_size=None, max_packet_size=None, timeout=None): .. versionchanged:: 1.15 Added the ``window_size`` and ``max_packet_size`` arguments. """ - return self.open_channel('session', - window_size=window_size, - max_packet_size=max_packet_size, - timeout=timeout) + return self.open_channel( + "session", + window_size=window_size, + max_packet_size=max_packet_size, + timeout=timeout, + ) def open_x11_channel(self, src_addr=None): """ @@ -714,10 +933,11 @@ def open_x11_channel(self, src_addr=None): x11 port, ie. 6010) :return: a new `.Channel` - :raises SSHException: if the request is rejected or the session ends + :raises: + `.SSHException` -- if the request is rejected or the session ends prematurely """ - return self.open_channel('x11', src_addr=src_addr) + return self.open_channel("x11", src_addr=src_addr) def open_forward_agent_channel(self): """ @@ -728,29 +948,32 @@ def open_forward_agent_channel(self): :return: a new `.Channel` - :raises SSHException: + :raises: `.SSHException` -- if the request is rejected or the session ends prematurely """ - return self.open_channel('auth-agent@openssh.com') + return self.open_channel("auth-agent@openssh.com") def open_forwarded_tcpip_channel(self, src_addr, dest_addr): """ - Request a new channel back to the client, of type ``"forwarded-tcpip"``. + Request a new channel back to the client, of type ``forwarded-tcpip``. + This is used after a client has requested port forwarding, for sending incoming connections back to the client. 
:param src_addr: originator's address :param dest_addr: local (server) connected address """ - return self.open_channel('forwarded-tcpip', dest_addr, src_addr) + return self.open_channel("forwarded-tcpip", dest_addr, src_addr) - def open_channel(self, - kind, - dest_addr=None, - src_addr=None, - window_size=None, - max_packet_size=None, - timeout=None): + def open_channel( + self, + kind, + dest_addr=None, + src_addr=None, + window_size=None, + max_packet_size=None, + timeout=None, + ): """ Request a new channel to the server. `Channels <.Channel>` are socket-like objects used for the actual transfer of data across the @@ -779,14 +1002,15 @@ def open_channel(self, :return: a new `.Channel` on success - :raises SSHException: if the request is rejected, the session ends + :raises: + `.SSHException` -- if the request is rejected, the session ends prematurely or there is a timeout openning a channel .. versionchanged:: 1.15 Added the ``window_size`` and ``max_packet_size`` arguments. """ if not self.active: - raise SSHException('SSH session not active') + raise SSHException("SSH session not active") timeout = 3600 if timeout is None else timeout self.lock.acquire() try: @@ -799,12 +1023,12 @@ def open_channel(self, m.add_int(chanid) m.add_int(window_size) m.add_int(max_packet_size) - if (kind == 'forwarded-tcpip') or (kind == 'direct-tcpip'): + if (kind == "forwarded-tcpip") or (kind == "direct-tcpip"): m.add_string(dest_addr[0]) m.add_int(dest_addr[1]) m.add_string(src_addr[0]) m.add_int(src_addr[1]) - elif kind == 'x11': + elif kind == "x11": m.add_string(src_addr[0]) m.add_int(src_addr[1]) chan = Channel(chanid) @@ -822,18 +1046,18 @@ def open_channel(self, if not self.active: e = self.get_exception() if e is None: - e = SSHException('Unable to open channel.') + e = SSHException("Unable to open channel.") raise e if event.is_set(): break elif start_ts + timeout < time.time(): - raise SSHException('Timeout openning channel.') + raise SSHException("Timeout opening channel.") chan = self._channels.get(chanid) if chan is not None: return chan e = self.get_exception() if e is None: - e = SSHException('Unable to open channel.') + e = SSHException("Unable to open channel.") raise e def request_port_forward(self, address, port, handler=None): @@ -844,7 +1068,11 @@ def request_port_forward(self, address, port, handler=None): If a handler is given, that handler is called from a different thread whenever a forwarded connection arrives. The handler parameters are:: - handler(channel, (origin_addr, origin_port), (server_addr, server_port)) + handler( + channel, + (origin_addr, origin_port), + (server_addr, server_port), + ) where ``server_addr`` and ``server_port`` are the address and port that the server was listening on. 
@@ -862,21 +1090,26 @@ def request_port_forward(self, address, port, handler=None): :return: the port number (`int`) allocated by the server - :raises SSHException: if the server refused the TCP forward request + :raises: + `.SSHException` -- if the server refused the TCP forward request """ if not self.active: - raise SSHException('SSH session not active') + raise SSHException("SSH session not active") port = int(port) - response = self.global_request('tcpip-forward', (address, port), wait=True) + response = self.global_request( + "tcpip-forward", (address, port), wait=True + ) if response is None: - raise SSHException('TCP forwarding request denied') + raise SSHException("TCP forwarding request denied") if port == 0: port = response.get_int() if handler is None: + def default_handler(channel, src_addr, dest_addr_port): - #src_addr, src_port = src_addr_port - #dest_addr, dest_port = dest_addr_port + # src_addr, src_port = src_addr_port + # dest_addr, dest_port = dest_addr_port self._queue_incoming_channel(channel) + handler = default_handler self._tcp_handler = handler return port @@ -893,7 +1126,7 @@ def cancel_port_forward(self, address, port): if not self.active: return self._tcp_handler = None - self.global_request('cancel-tcpip-forward', (address, port), wait=True) + self.global_request("cancel-tcpip-forward", (address, port), wait=True) def open_sftp_client(self): """ @@ -934,8 +1167,9 @@ def renegotiate_keys(self): traffic both ways as the two sides swap keys and do computations. This method returns when the session has switched to new keys. - :raises SSHException: if the key renegotiation failed (which causes the - session to end) + :raises: + `.SSHException` -- if the key renegotiation failed (which causes + the session to end) """ self.completion_event = threading.Event() self._send_kex_init() @@ -945,7 +1179,7 @@ def renegotiate_keys(self): e = self.get_exception() if e is not None: raise e - raise SSHException('Negotiation failed.') + raise SSHException("Negotiation failed.") if self.completion_event.is_set(): break return @@ -961,8 +1195,11 @@ def set_keepalive(self, interval): seconds to wait before sending a keepalive packet (or 0 to disable keepalives). """ - self.packetizer.set_keepalive(interval, - lambda x=weakref.proxy(self): x.global_request('keepalive@lag.net', wait=False)) + + def _request(x=weakref.proxy(self)): + return x.global_request("keepalive@lag.net", wait=False) + + self.packetizer.set_keepalive(interval, _request) def global_request(self, kind, data=None, wait=True): """ @@ -989,7 +1226,7 @@ def global_request(self, kind, data=None, wait=True): m.add_boolean(wait) if data is not None: m.add(*data) - self._log(DEBUG, 'Sending global request "%s"' % kind) + self._log(DEBUG, 'Sending global request "{}"'.format(kind)) self._send_user_message(m) if not wait: return None @@ -1004,8 +1241,8 @@ def global_request(self, kind, data=None, wait=True): def accept(self, timeout=None): """ Return the next channel opened by the client over this transport, in - server mode. If no channel is opened before the given timeout, ``None`` - is returned. + server mode. If no channel is opened before the given timeout, + ``None`` is returned. 
:param int timeout: seconds to wait for a channel, or ``None`` to wait forever @@ -1026,8 +1263,18 @@ def accept(self, timeout=None): self.lock.release() return chan - def connect(self, hostkey=None, username='', password=None, pkey=None, - gss_host=None, gss_auth=False, gss_kex=False, gss_deleg_creds=True): + def connect( + self, + hostkey=None, + username="", + password=None, + pkey=None, + gss_host=None, + gss_auth=False, + gss_kex=False, + gss_deleg_creds=True, + gss_trust_dns=True, + ): """ Negotiate an SSH2 session, and optionally verify the server's host key and authenticate using a password or private key. This is a shortcut @@ -1065,12 +1312,35 @@ def connect(self, hostkey=None, username='', password=None, pkey=None, Perform GSS-API Key Exchange and user authentication. :param bool gss_deleg_creds: Whether to delegate GSS-API client credentials. + :param gss_trust_dns: + Indicates whether or not the DNS is trusted to securely + canonicalize the name of the host being connected to (default + ``True``). - :raises SSHException: if the SSH2 negotiation fails, the host key + :raises: `.SSHException` -- if the SSH2 negotiation fails, the host key supplied by the server is incorrect, or authentication fails. + + .. versionchanged:: 2.3 + Added the ``gss_trust_dns`` argument. """ if hostkey is not None: - self._preferred_keys = [hostkey.get_name()] + # TODO: a more robust implementation would be to ask each key class + # for its nameS plural, and just use that. + # TODO: that could be used in a bunch of other spots too + if isinstance(hostkey, RSAKey): + self._preferred_keys = [ + "rsa-sha2-512", + "rsa-sha2-256", + "ssh-rsa", + ] + else: + self._preferred_keys = [hostkey.get_name()] + + self.set_gss_host( + gss_host=gss_host, + trust_dns=gss_trust_dns, + gssapi_requested=gss_kex or gss_auth, + ) self.start_client() @@ -1079,25 +1349,44 @@ def connect(self, hostkey=None, username='', password=None, pkey=None, # the host key. if (hostkey is not None) and not gss_kex: key = self.get_remote_server_key() - if (key.get_name() != hostkey.get_name()) or (key.asbytes() != hostkey.asbytes()): - self._log(DEBUG, 'Bad host key from server') - self._log(DEBUG, 'Expected: %s: %s' % (hostkey.get_name(), repr(hostkey.asbytes()))) - self._log(DEBUG, 'Got : %s: %s' % (key.get_name(), repr(key.asbytes()))) - raise SSHException('Bad host key from server') - self._log(DEBUG, 'Host key verified (%s)' % hostkey.get_name()) + if ( + key.get_name() != hostkey.get_name() + or key.asbytes() != hostkey.asbytes() + ): + self._log(DEBUG, "Bad host key from server") + self._log( + DEBUG, + "Expected: {}: {}".format( + hostkey.get_name(), repr(hostkey.asbytes()) + ), + ) + self._log( + DEBUG, + "Got : {}: {}".format( + key.get_name(), repr(key.asbytes()) + ), + ) + raise SSHException("Bad host key from server") + self._log( + DEBUG, "Host key verified ({})".format(hostkey.get_name()) + ) if (pkey is not None) or (password is not None) or gss_auth or gss_kex: if gss_auth: - self._log(DEBUG, 'Attempting GSS-API auth... (gssapi-with-mic)') - self.auth_gssapi_with_mic(username, gss_host, gss_deleg_creds) + self._log( + DEBUG, "Attempting GSS-API auth... (gssapi-with-mic)" + ) # noqa + self.auth_gssapi_with_mic( + username, self.gss_host, gss_deleg_creds + ) elif gss_kex: - self._log(DEBUG, 'Attempting GSS-API auth... (gssapi-keyex)') + self._log(DEBUG, "Attempting GSS-API auth... 
(gssapi-keyex)") self.auth_gssapi_keyex(username) elif pkey is not None: - self._log(DEBUG, 'Attempting public-key auth...') + self._log(DEBUG, "Attempting public-key auth...") self.auth_publickey(username, pkey) else: - self._log(DEBUG, 'Attempting password auth...') + self._log(DEBUG, "Attempting password auth...") self.auth_password(username, password) return @@ -1133,7 +1422,7 @@ def set_subsystem_handler(self, name, handler, *larg, **kwarg): passed to the `.SubsystemHandler` constructor later. :param str name: name of the subsystem. - :param class handler: + :param handler: subclass of `.SubsystemHandler` that handles this subsystem. """ try: @@ -1151,7 +1440,11 @@ def is_authenticated(self): successfully; False if authentication failed and/or the session is closed. """ - return self.active and (self.auth_handler is not None) and self.auth_handler.is_authenticated() + return ( + self.active + and self.auth_handler is not None + and self.auth_handler.is_authenticated() + ) def get_username(self): """ @@ -1187,18 +1480,20 @@ def auth_none(self, username): :param str username: the username to authenticate as :return: - `list` of auth types permissible for the next stage of + list of auth types permissible for the next stage of authentication (normally empty) - :raises BadAuthenticationType: if "none" authentication isn't allowed + :raises: + `.BadAuthenticationType` -- if "none" authentication isn't allowed by the server for this user - :raises SSHException: if the authentication failed due to a network + :raises: + `.SSHException` -- if the authentication failed due to a network error .. versionadded:: 1.5 """ if (not self.active) or (not self.initial_kex_done): - raise SSHException('No existing session') + raise SSHException("No existing session") my_event = threading.Event() self.auth_handler = AuthHandler(self) self.auth_handler.auth_none(username, my_event) @@ -1240,18 +1535,21 @@ def auth_password(self, username, password, event=None, fallback=True): ``True`` if an attempt at an automated "interactive" password auth should be made if the server doesn't support normal password auth :return: - `list` of auth types permissible for the next stage of + list of auth types permissible for the next stage of authentication (normally empty) - :raises BadAuthenticationType: if password authentication isn't + :raises: + `.BadAuthenticationType` -- if password authentication isn't allowed by the server for this user (and no event was passed in) - :raises AuthenticationException: if the authentication failed (and no + :raises: + `.AuthenticationException` -- if the authentication failed (and no event was passed in) - :raises SSHException: if there was a network error + :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): - # we should never try to send the password unless we're on a secure link - raise SSHException('No existing session') + # we should never try to send the password unless we're on a secure + # link + raise SSHException("No existing session") if event is None: my_event = threading.Event() else: @@ -1264,13 +1562,15 @@ def auth_password(self, username, password, event=None, fallback=True): try: return self.auth_handler.wait_for_response(my_event) except BadAuthenticationType as e: - # if password auth isn't allowed, but keyboard-interactive *is*, try to fudge it - if not fallback or ('keyboard-interactive' not in e.allowed_types): + # if password auth isn't allowed, but keyboard-interactive *is*, + # try to fudge it + if 
not fallback or ("keyboard-interactive" not in e.allowed_types): raise try: + def handler(title, instructions, fields): if len(fields) > 1: - raise SSHException('Fallback authentication failed.') + raise SSHException("Fallback authentication failed.") if len(fields) == 0: # for some reason, at least on os x, a 2nd request will # be made with zero fields requested. maybe it's just @@ -1278,6 +1578,7 @@ def handler(title, instructions, fields): # type we're doing here. *shrug* :) return [] return [password] + return self.auth_interactive(username, handler) except SSHException: # attempt failed; just raise the original exception @@ -1307,18 +1608,20 @@ def auth_publickey(self, username, key, event=None): an event to trigger when the authentication attempt is complete (whether it was successful or not) :return: - `list` of auth types permissible for the next stage of + list of auth types permissible for the next stage of authentication (normally empty) - :raises BadAuthenticationType: if public-key authentication isn't + :raises: + `.BadAuthenticationType` -- if public-key authentication isn't allowed by the server for this user (and no event was passed in) - :raises AuthenticationException: if the authentication failed (and no + :raises: + `.AuthenticationException` -- if the authentication failed (and no event was passed in) - :raises SSHException: if there was a network error + :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): # we should never try to authenticate unless we're on a secure link - raise SSHException('No existing session') + raise SSHException("No existing session") if event is None: my_event = threading.Event() else: @@ -1330,7 +1633,7 @@ def auth_publickey(self, username, key, event=None): return [] return self.auth_handler.wait_for_response(my_event) - def auth_interactive(self, username, handler, submethods=''): + def auth_interactive(self, username, handler, submethods=""): """ Authenticate to the server interactively. A handler is used to answer arbitrary questions from the server. On many servers, this is just a @@ -1363,25 +1666,27 @@ def auth_interactive(self, username, handler, submethods=''): :param callable handler: a handler for responding to server questions :param str submethods: a string list of desired submethods (optional) :return: - `list` of auth types permissible for the next stage of + list of auth types permissible for the next stage of authentication (normally empty). - :raises BadAuthenticationType: if public-key authentication isn't + :raises: `.BadAuthenticationType` -- if public-key authentication isn't allowed by the server for this user - :raises AuthenticationException: if the authentication failed - :raises SSHException: if there was a network error + :raises: `.AuthenticationException` -- if the authentication failed + :raises: `.SSHException` -- if there was a network error .. 
versionadded:: 1.5 """ if (not self.active) or (not self.initial_kex_done): # we should never try to authenticate unless we're on a secure link - raise SSHException('No existing session') + raise SSHException("No existing session") my_event = threading.Event() self.auth_handler = AuthHandler(self) - self.auth_handler.auth_interactive(username, handler, my_event, submethods) + self.auth_handler.auth_interactive( + username, handler, my_event, submethods + ) return self.auth_handler.wait_for_response(my_event) - def auth_interactive_dumb(self, username, handler=None, submethods=''): + def auth_interactive_dumb(self, username, handler=None, submethods=""): """ Autenticate to the server interactively but dumber. Just print the prompt and / or instructions to stdout and send back @@ -1390,16 +1695,18 @@ def auth_interactive_dumb(self, username, handler=None, submethods=''): """ if not handler: + def handler(title, instructions, prompt_list): answers = [] if title: print(title.strip()) if instructions: print(instructions.strip()) - for prompt,show_input in prompt_list: - print(prompt.strip(),end=' ') + for prompt, show_input in prompt_list: + print(prompt.strip(), end=" ") answers.append(input()) return answers + return self.auth_interactive(username, handler, submethods) def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds): @@ -1411,41 +1718,41 @@ def auth_gssapi_with_mic(self, username, gss_host, gss_deleg_creds): :param bool gss_deleg_creds: Delegate credentials or not :return: list of auth types permissible for the next stage of authentication (normally empty) - :rtype: list - :raise BadAuthenticationType: if gssapi-with-mic isn't + :raises: `.BadAuthenticationType` -- if gssapi-with-mic isn't allowed by the server (and no event was passed in) - :raise AuthenticationException: if the authentication failed (and no + :raises: + `.AuthenticationException` -- if the authentication failed (and no event was passed in) - :raise SSHException: if there was a network error + :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): # we should never try to authenticate unless we're on a secure link - raise SSHException('No existing session') + raise SSHException("No existing session") my_event = threading.Event() self.auth_handler = AuthHandler(self) - self.auth_handler.auth_gssapi_with_mic(username, gss_host, gss_deleg_creds, my_event) + self.auth_handler.auth_gssapi_with_mic( + username, gss_host, gss_deleg_creds, my_event + ) return self.auth_handler.wait_for_response(my_event) def auth_gssapi_keyex(self, username): """ - Authenticate to the Server with GSS-API / SSPI if GSS-API Key Exchange - was the used key exchange method. + Authenticate to the server with GSS-API/SSPI if GSS-API kex is in use. - :param str username: The username to authenticate as - :param str gss_host: The target host - :param bool gss_deleg_creds: Delegate credentials or not - :return: list of auth types permissible for the next stage of - authentication (normally empty) - :rtype: list - :raise BadAuthenticationType: if GSS-API Key Exchange was not performed - (and no event was passed in) - :raise AuthenticationException: if the authentication failed (and no - event was passed in) - :raise SSHException: if there was a network error + :param str username: The username to authenticate as. 
+ :returns: + a list of auth types permissible for the next stage of + authentication (normally empty) + :raises: `.BadAuthenticationType` -- + if GSS-API Key Exchange was not performed (and no event was passed + in) + :raises: `.AuthenticationException` -- + if the authentication failed (and no event was passed in) + :raises: `.SSHException` -- if there was a network error """ if (not self.active) or (not self.initial_kex_done): # we should never try to authenticate unless we're on a secure link - raise SSHException('No existing session') + raise SSHException("No existing session") my_event = threading.Event() self.auth_handler = AuthHandler(self) self.auth_handler.auth_gssapi_keyex(username, my_event) @@ -1512,24 +1819,25 @@ def use_compression(self, compress=True): .. versionadded:: 1.5.2 """ if compress: - self._preferred_compression = ('zlib@openssh.com', 'zlib', 'none') + self._preferred_compression = ("zlib@openssh.com", "zlib", "none") else: - self._preferred_compression = ('none',) + self._preferred_compression = ("none",) def getpeername(self): """ Return the address of the remote side of this Transport, if possible. - This is effectively a wrapper around ``'getpeername'`` on the underlying - socket. If the socket-like object has no ``'getpeername'`` method, - then ``("unknown", 0)`` is returned. + + This is effectively a wrapper around ``getpeername`` on the underlying + socket. If the socket-like object has no ``getpeername`` method, then + ``("unknown", 0)`` is returned. :return: the address of the remote host, if known, as a ``(str, int)`` tuple. """ - gp = getattr(self.sock, 'getpeername', None) + gp = getattr(self.sock, "getpeername", None) if gp is None: - return 'unknown', 0 + return "unknown", 0 return gp() def stop_thread(self): @@ -1548,12 +1856,14 @@ def stop_thread(self): # our socket and packetizer are both closed (but where we'd # otherwise be sitting forever on that recv()). while ( - self.is_alive() and self is not threading.current_thread() - and not self.sock._closed and not self.packetizer.closed + self.is_alive() + and self is not threading.current_thread() + and not self.sock._closed + and not self.packetizer.closed ): self.join(0.1) - ### internals... + # internals... def _log(self, level, msg, *args): if issubclass(type(msg), list): @@ -1591,36 +1901,48 @@ def _send_user_message(self, data): while True: self.clear_to_send.wait(0.1) if not self.active: - self._log(DEBUG, 'Dropping user packet because connection is dead.') + self._log( + DEBUG, "Dropping user packet because connection is dead." + ) # noqa return self.clear_to_send_lock.acquire() if self.clear_to_send.is_set(): break self.clear_to_send_lock.release() if time.time() > start + self.clear_to_send_timeout: - raise SSHException('Key-exchange timed out waiting for key negotiation') + raise SSHException( + "Key-exchange timed out waiting for key negotiation" + ) # noqa try: self._send_message(data) finally: self.clear_to_send_lock.release() def _set_K_H(self, k, h): - """used by a kex object to set the K (root key) and H (exchange hash)""" + """ + Used by a kex obj to set the K (root key) and H (exchange hash). + """ self.K = k self.H = h if self.session_id is None: self.session_id = h def _expect_packet(self, *ptypes): - """used by a kex object to register the next packet type it expects to see""" + """ + Used by a kex obj to register the next packet type it expects to see. 
+ """ self._expected_packet = tuple(ptypes) def _verify_key(self, host_key, sig): key = self._key_info[self.host_key_type](Message(host_key)) if key is None: - raise SSHException('Unknown host key type') + raise SSHException("Unknown host key type") if not key.verify_ssh_sig(self.H, Message(sig)): - raise SSHException('Signature verification (%s) failed.' % self.host_key_type) + raise SSHException( + "Signature verification ({}) failed.".format( + self.host_key_type + ) + ) # noqa self.host_key = key def _compute_key(self, id, nbytes): @@ -1632,14 +1954,16 @@ def _compute_key(self, id, nbytes): m.add_bytes(self.session_id) # Fallback to SHA1 for kex engines that fail to specify a hex # algorithm, or for e.g. transport tests that don't run kexinit. - hash_algo = getattr(self.kex_engine, 'hash_algo', None) - hash_select_msg = "kex engine %s specified hash_algo %r" % (self.kex_engine.__class__.__name__, hash_algo) + hash_algo = getattr(self.kex_engine, "hash_algo", None) + hash_select_msg = "kex engine {} specified hash_algo {!r}".format( + self.kex_engine.__class__.__name__, hash_algo + ) if hash_algo is None: hash_algo = sha1 hash_select_msg += ", falling back to sha1" - if not hasattr(self, '_logged_hash_selection'): + if not hasattr(self, "_logged_hash_selection"): self._log(DEBUG, hash_select_msg) - setattr(self, '_logged_hash_selection', True) + setattr(self, "_logged_hash_selection", True) out = sofar = hash_algo(m.asbytes()).digest() while len(out) < nbytes: m = Message() @@ -1653,26 +1977,11 @@ def _compute_key(self, id, nbytes): def _get_cipher(self, name, key, iv, operation): if name not in self._cipher_info: - raise SSHException('Unknown client cipher ' + name) - if name in ('arcfour128', 'arcfour256'): - # arcfour cipher - cipher = Cipher( - self._cipher_info[name]['class'](key), - None, - backend=default_backend() - ) - if operation is self._ENCRYPT: - engine = cipher.encryptor() - else: - engine = cipher.decryptor() - # as per RFC 4345, the first 1536 bytes of keystream - # generated by the cipher MUST be discarded - engine.encrypt(" " * 1536) - return engine + raise SSHException("Unknown client cipher " + name) else: cipher = Cipher( - self._cipher_info[name]['class'](key), - self._cipher_info[name]['mode'](iv), + self._cipher_info[name]["class"](key), + self._cipher_info[name]["mode"](iv), backend=default_backend(), ) if operation is self._ENCRYPT: @@ -1682,8 +1991,10 @@ def _get_cipher(self, name, key, iv, operation): def _set_forward_agent_handler(self, handler): if handler is None: + def default_handler(channel): self._queue_incoming_channel(channel) + self._forward_agent_handler = default_handler else: self._forward_agent_handler = handler @@ -1694,6 +2005,7 @@ def _set_x11_handler(self, handler): # by default, use the same mechanism as accept() def default_handler(channel, src_addr_port): self._queue_incoming_channel(channel) + self._x11_handler = default_handler else: self._x11_handler = handler @@ -1716,6 +2028,43 @@ def _sanitize_packet_size(self, max_packet_size): max_packet_size = self.default_max_packet_size return clamp_value(MIN_PACKET_SIZE, max_packet_size, MAX_WINDOW_SIZE) + def _ensure_authed(self, ptype, message): + """ + Checks message type against current auth state. + + If server mode, and auth has not succeeded, and the message is of a + post-auth type (channel open or global request) an appropriate error + response Message is crafted and returned to caller for sending. + + Otherwise (client mode, authed, or pre-auth message) returns None. 
+ """ + if ( + not self.server_mode + or ptype <= HIGHEST_USERAUTH_MESSAGE_ID + or self.is_authenticated() + ): + return None + # WELP. We must be dealing with someone trying to do non-auth things + # without being authed. Tell them off, based on message class. + reply = Message() + # Global requests have no details, just failure. + if ptype == MSG_GLOBAL_REQUEST: + reply.add_byte(cMSG_REQUEST_FAILURE) + # Channel opens let us reject w/ a specific type + message. + elif ptype == MSG_CHANNEL_OPEN: + kind = message.get_text() # noqa + chanid = message.get_int() + reply.add_byte(cMSG_CHANNEL_OPEN_FAILURE) + reply.add_int(chanid) + reply.add_int(OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED) + reply.add_string("") + reply.add_string("en") + # NOTE: Post-open channel messages do not need checking; the above will + # reject attemps to open channels, meaning that even if a malicious + # user tries to send a MSG_CHANNEL_REQUEST, it will simply fall under + # the logic that handles unknown channel IDs (as the channel list will + # be empty.) + return reply def run(self): # (use the exposed "run" method, because if we specify a thread target @@ -1729,14 +2078,18 @@ def run(self): # active=True occurs before the thread is launched, to avoid a race _active_threads.append(self) + tid = hex(long(id(self)) & xffffffff) if self.server_mode: - self._log(DEBUG, 'starting thread (server mode): %s' % hex(long(id(self)) & xffffffff)) + self._log(DEBUG, "starting thread (server mode): {}".format(tid)) else: - self._log(DEBUG, 'starting thread (client mode): %s' % hex(long(id(self)) & xffffffff)) + self._log(DEBUG, "starting thread (client mode): {}".format(tid)) try: try: - self.packetizer.write_all(b(self.local_version + '\r\n')) - self._log(DEBUG, 'Local version/idstring: %s' % self.local_version) + self.packetizer.write_all(b(self.local_version + "\r\n")) + self._log( + DEBUG, + "Local version/idstring: {}".format(self.local_version), + ) # noqa self._check_banner() # The above is actually very much part of the handshake, but # sometimes the banner can be read but the machine is not @@ -1760,62 +2113,96 @@ def run(self): continue elif ptype == MSG_DISCONNECT: self._parse_disconnect(m) - self.active = False - self.packetizer.close() break elif ptype == MSG_DEBUG: self._parse_debug(m) continue if len(self._expected_packet) > 0: if ptype not in self._expected_packet: - raise SSHException('Expecting packet from %r, got %d' % (self._expected_packet, ptype)) + raise SSHException( + "Expecting packet from {!r}, got {:d}".format( + self._expected_packet, ptype + ) + ) # noqa self._expected_packet = tuple() if (ptype >= 30) and (ptype <= 41): self.kex_engine.parse_next(ptype, m) continue if ptype in self._handler_table: - self._handler_table[ptype](self, m) + error_msg = self._ensure_authed(ptype, m) + if error_msg: + self._send_message(error_msg) + else: + self._handler_table[ptype](self, m) elif ptype in self._channel_handler_table: chanid = m.get_int() chan = self._channels.get(chanid) if chan is not None: self._channel_handler_table[ptype](chan, m) elif chanid in self.channels_seen: - self._log(DEBUG, 'Ignoring message for dead channel %d' % chanid) + self._log( + DEBUG, + "Ignoring message for dead channel {:d}".format( # noqa + chanid + ), + ) else: - self._log(ERROR, 'Channel request for unknown channel %d' % chanid) - self.active = False - self.packetizer.close() - elif (self.auth_handler is not None) and (ptype in self.auth_handler._handler_table): - self.auth_handler._handler_table[ptype](self.auth_handler, m) + 
self._log( + ERROR, + "Channel request for unknown channel {:d}".format( # noqa + chanid + ), + ) + break + elif ( + self.auth_handler is not None + and ptype in self.auth_handler._handler_table + ): + handler = self.auth_handler._handler_table[ptype] + handler(self.auth_handler, m) + if len(self._expected_packet) > 0: + continue else: - self._log(WARNING, 'Oops, unhandled type %d' % ptype) - msg = Message() - msg.add_byte(cMSG_UNIMPLEMENTED) - msg.add_int(m.seqno) - self._send_message(msg) + # Respond with "I don't implement this particular + # message type" message (unless the message type was + # itself literally MSG_UNIMPLEMENTED, in which case, we + # just shut up to avoid causing a useless loop). + name = MSG_NAMES[ptype] + warning = "Oops, unhandled type {} ({!r})".format( + ptype, name + ) + self._log(WARNING, warning) + if ptype != MSG_UNIMPLEMENTED: + msg = Message() + msg.add_byte(cMSG_UNIMPLEMENTED) + msg.add_int(m.seqno) + self._send_message(msg) self.packetizer.complete_handshake() except SSHException as e: - self._log(ERROR, 'Exception: ' + str(e)) + self._log( + ERROR, + "Exception ({}): {}".format( + "server" if self.server_mode else "client", e + ), + ) self._log(ERROR, util.tb_strings()) self.saved_exception = e except EOFError as e: - self._log(DEBUG, 'EOF in transport thread') - #self._log(DEBUG, util.tb_strings()) + self._log(DEBUG, "EOF in transport thread") self.saved_exception = e except socket.error as e: if type(e.args) is tuple: if e.args: - emsg = '%s (%d)' % (e.args[1], e.args[0]) + emsg = "{} ({:d})".format(e.args[1], e.args[0]) else: # empty tuple, e.g. socket.timeout emsg = str(e) or repr(e) else: emsg = e.args - self._log(ERROR, 'Socket exception: ' + emsg) + self._log(ERROR, "Socket exception: " + emsg) self.saved_exception = e except Exception as e: - self._log(ERROR, 'Unknown exception: ' + str(e)) + self._log(ERROR, "Unknown exception: " + str(e)) self._log(ERROR, util.tb_strings()) self.saved_exception = e _active_threads.remove(self) @@ -1844,19 +2231,18 @@ def run(self): if self.sys.modules is not None: raise - def _log_agreement(self, which, local, remote): # Log useful, non-duplicative line re: an agreed-upon algorithm. # Old code implied algorithms could be asymmetrical (different for # inbound vs outbound) so we preserve that possibility. - msg = "{0} agreed: ".format(which) + msg = "{}: ".format(which) if local == remote: msg += local else: - msg += "local={0}, remote={1}".format(local, remote) + msg += "local={}, remote={}".format(local, remote) self._log(DEBUG, msg) - ### protocol stages + # protocol stages def _negotiate_keys(self, m): # throws SSHException on anything unusual @@ -1885,30 +2271,34 @@ def _check_banner(self): except ProxyCommandFailure: raise except Exception as e: - raise SSHException('Error reading SSH protocol banner' + str(e)) - if buf[:4] == 'SSH-': + raise SSHException( + "Error reading SSH protocol banner" + str(e) + ) + if buf[:4] == "SSH-": break - self._log(DEBUG, 'Banner: ' + buf) - if buf[:4] != 'SSH-': + self._log(DEBUG, "Banner: " + buf) + if buf[:4] != "SSH-": raise SSHException('Indecipherable protocol version "' + buf + '"') # save this server version string for later self.remote_version = buf - self._log(DEBUG, 'Remote version/idstring: %s' % buf) + self._log(DEBUG, "Remote version/idstring: {}".format(buf)) # pull off any attached comment - comment = '' - i = buf.find(' ') + # NOTE: comment used to be stored in a variable and then...never used. + # since 2003. 
ca 877cd974b8182d26fa76d566072917ea67b64e67 + i = buf.find(" ") if i >= 0: - comment = buf[i+1:] buf = buf[:i] # parse out version string and make sure it matches - segs = buf.split('-', 2) + segs = buf.split("-", 2) if len(segs) < 3: - raise SSHException('Invalid SSH banner') + raise SSHException("Invalid SSH banner") version = segs[1] client = segs[2] - if version != '1.99' and version != '2.0': - raise SSHException('Incompatible version (%s instead of 2.0)' % (version,)) - self._log(INFO, 'Connected (version %s, client %s)' % (version, client)) + if version != "1.99" and version != "2.0": + msg = "Incompatible version ({} instead of 2.0)" + raise IncompatiblePeer(msg.format(version)) + msg = "Connected (version {}, client {})".format(version, client) + self._log(INFO, msg) def _send_kex_init(self): """ @@ -1920,143 +2310,283 @@ def _send_kex_init(self): self.clear_to_send.clear() finally: self.clear_to_send_lock.release() + self.gss_kex_used = False self.in_kex = True + kex_algos = list(self.preferred_kex) if self.server_mode: - mp_required_prefix = 'diffie-hellman-group-exchange-sha' - kex_mp = [k for k in self._preferred_kex if k.startswith(mp_required_prefix)] + mp_required_prefix = "diffie-hellman-group-exchange-sha" + kex_mp = [k for k in kex_algos if k.startswith(mp_required_prefix)] if (self._modulus_pack is None) and (len(kex_mp) > 0): - # can't do group-exchange if we don't have a pack of potential primes - pkex = [k for k in self.get_security_options().kex - if not k.startswith(mp_required_prefix)] + # can't do group-exchange if we don't have a pack of potential + # primes + pkex = [ + k + for k in self.get_security_options().kex + if not k.startswith(mp_required_prefix) + ] self.get_security_options().kex = pkex - available_server_keys = list(filter(list(self.server_key_dict.keys()).__contains__, - self._preferred_keys)) + available_server_keys = list( + filter( + list(self.server_key_dict.keys()).__contains__, + # TODO: ensure tests will catch if somebody streamlines + # this by mistake - case is the admittedly silly one where + # the only calls to add_server_key() contain keys which + # were filtered out of the below via disabled_algorithms. + # If this is streamlined, we would then be allowing the + # disabled algorithm(s) for hostkey use + # TODO: honestly this prob just wants to get thrown out + # when we make kex configuration more straightforward + self.preferred_keys, + ) + ) else: - available_server_keys = self._preferred_keys + available_server_keys = self.preferred_keys + # Signal support for MSG_EXT_INFO. + # NOTE: doing this here handily means we don't even consider this + # value when agreeing on real kex algo to use (which is a common + # pitfall when adding this apparently). 
+ kex_algos.append("ext-info-c") m = Message() m.add_byte(cMSG_KEXINIT) m.add_bytes(os.urandom(16)) - m.add_list(self._preferred_kex) + m.add_list(kex_algos) m.add_list(available_server_keys) - m.add_list(self._preferred_ciphers) - m.add_list(self._preferred_ciphers) - m.add_list(self._preferred_macs) - m.add_list(self._preferred_macs) - m.add_list(self._preferred_compression) - m.add_list(self._preferred_compression) + m.add_list(self.preferred_ciphers) + m.add_list(self.preferred_ciphers) + m.add_list(self.preferred_macs) + m.add_list(self.preferred_macs) + m.add_list(self.preferred_compression) + m.add_list(self.preferred_compression) m.add_string(bytes()) m.add_string(bytes()) m.add_boolean(False) m.add_int(0) # save a copy for later (needed to compute a hash) - self.local_kex_init = m.asbytes() + self.local_kex_init = self._latest_kex_init = m.asbytes() self._send_message(m) + def _really_parse_kex_init(self, m, ignore_first_byte=False): + parsed = {} + if ignore_first_byte: + m.get_byte() + m.get_bytes(16) # cookie, discarded + parsed["kex_algo_list"] = m.get_list() + parsed["server_key_algo_list"] = m.get_list() + parsed["client_encrypt_algo_list"] = m.get_list() + parsed["server_encrypt_algo_list"] = m.get_list() + parsed["client_mac_algo_list"] = m.get_list() + parsed["server_mac_algo_list"] = m.get_list() + parsed["client_compress_algo_list"] = m.get_list() + parsed["server_compress_algo_list"] = m.get_list() + parsed["client_lang_list"] = m.get_list() + parsed["server_lang_list"] = m.get_list() + parsed["kex_follows"] = m.get_boolean() + m.get_int() # unused + return parsed + + def _get_latest_kex_init(self): + return self._really_parse_kex_init( + Message(self._latest_kex_init), ignore_first_byte=True + ) + def _parse_kex_init(self, m): - cookie = m.get_bytes(16) - kex_algo_list = m.get_list() - server_key_algo_list = m.get_list() - client_encrypt_algo_list = m.get_list() - server_encrypt_algo_list = m.get_list() - client_mac_algo_list = m.get_list() - server_mac_algo_list = m.get_list() - client_compress_algo_list = m.get_list() - server_compress_algo_list = m.get_list() - client_lang_list = m.get_list() - server_lang_list = m.get_list() - kex_follows = m.get_boolean() - unused = m.get_int() - - self._log(DEBUG, 'kex algos:' + str(kex_algo_list) + ' server key:' + str(server_key_algo_list) + - ' client encrypt:' + str(client_encrypt_algo_list) + - ' server encrypt:' + str(server_encrypt_algo_list) + - ' client mac:' + str(client_mac_algo_list) + - ' server mac:' + str(server_mac_algo_list) + - ' client compress:' + str(client_compress_algo_list) + - ' server compress:' + str(server_compress_algo_list) + - ' client lang:' + str(client_lang_list) + - ' server lang:' + str(server_lang_list) + - ' kex follows?' 
+ str(kex_follows)) + parsed = self._really_parse_kex_init(m) + kex_algo_list = parsed["kex_algo_list"] + server_key_algo_list = parsed["server_key_algo_list"] + client_encrypt_algo_list = parsed["client_encrypt_algo_list"] + server_encrypt_algo_list = parsed["server_encrypt_algo_list"] + client_mac_algo_list = parsed["client_mac_algo_list"] + server_mac_algo_list = parsed["server_mac_algo_list"] + client_compress_algo_list = parsed["client_compress_algo_list"] + server_compress_algo_list = parsed["server_compress_algo_list"] + client_lang_list = parsed["client_lang_list"] + server_lang_list = parsed["server_lang_list"] + kex_follows = parsed["kex_follows"] + + self._log(DEBUG, "=== Key exchange possibilities ===") + for prefix, value in ( + ("kex algos", kex_algo_list), + ("server key", server_key_algo_list), + # TODO: shouldn't these two lines say "cipher" to match usual + # terminology (including elsewhere in paramiko!)? + ("client encrypt", client_encrypt_algo_list), + ("server encrypt", server_encrypt_algo_list), + ("client mac", client_mac_algo_list), + ("server mac", server_mac_algo_list), + ("client compress", client_compress_algo_list), + ("server compress", server_compress_algo_list), + ("client lang", client_lang_list), + ("server lang", server_lang_list), + ): + if value == [""]: + value = [""] + value = ", ".join(value) + self._log(DEBUG, "{}: {}".format(prefix, value)) + self._log(DEBUG, "kex follows: {}".format(kex_follows)) + self._log(DEBUG, "=== Key exchange agreements ===") + + # Strip out ext-info "kex algo" + self._remote_ext_info = None + if kex_algo_list[-1].startswith("ext-info-"): + self._remote_ext_info = kex_algo_list.pop() # as a server, we pick the first item in the client's list that we # support. # as a client, we pick the first item in our list that the server # supports. if self.server_mode: - agreed_kex = list(filter( - self._preferred_kex.__contains__, - kex_algo_list - )) + agreed_kex = list( + filter(self.preferred_kex.__contains__, kex_algo_list) + ) else: - agreed_kex = list(filter( - kex_algo_list.__contains__, - self._preferred_kex - )) + agreed_kex = list( + filter(kex_algo_list.__contains__, self.preferred_kex) + ) if len(agreed_kex) == 0: - raise SSHException('Incompatible ssh peer (no acceptable kex algorithm)') + # TODO: do an auth-overhaul style aggregate exception here? 
+ # TODO: would let us streamline log output & show all failures up + # front + raise IncompatiblePeer( + "Incompatible ssh peer (no acceptable kex algorithm)" + ) # noqa self.kex_engine = self._kex_info[agreed_kex[0]](self) - self._log(DEBUG, "Kex agreed: %s" % agreed_kex[0]) + self._log(DEBUG, "Kex: {}".format(agreed_kex[0])) if self.server_mode: - available_server_keys = list(filter(list(self.server_key_dict.keys()).__contains__, - self._preferred_keys)) - agreed_keys = list(filter(available_server_keys.__contains__, server_key_algo_list)) + available_server_keys = list( + filter( + list(self.server_key_dict.keys()).__contains__, + self.preferred_keys, + ) + ) + agreed_keys = list( + filter( + available_server_keys.__contains__, server_key_algo_list + ) + ) else: - agreed_keys = list(filter(server_key_algo_list.__contains__, self._preferred_keys)) + agreed_keys = list( + filter(server_key_algo_list.__contains__, self.preferred_keys) + ) if len(agreed_keys) == 0: - raise SSHException('Incompatible ssh peer (no acceptable host key)') + raise IncompatiblePeer( + "Incompatible ssh peer (no acceptable host key)" + ) # noqa self.host_key_type = agreed_keys[0] if self.server_mode and (self.get_server_key() is None): - raise SSHException('Incompatible ssh peer (can\'t match requested host key type)') + raise IncompatiblePeer( + "Incompatible ssh peer (can't match requested host key type)" + ) # noqa + self._log_agreement("HostKey", agreed_keys[0], agreed_keys[0]) if self.server_mode: - agreed_local_ciphers = list(filter(self._preferred_ciphers.__contains__, - server_encrypt_algo_list)) - agreed_remote_ciphers = list(filter(self._preferred_ciphers.__contains__, - client_encrypt_algo_list)) + agreed_local_ciphers = list( + filter( + self.preferred_ciphers.__contains__, + server_encrypt_algo_list, + ) + ) + agreed_remote_ciphers = list( + filter( + self.preferred_ciphers.__contains__, + client_encrypt_algo_list, + ) + ) else: - agreed_local_ciphers = list(filter(client_encrypt_algo_list.__contains__, - self._preferred_ciphers)) - agreed_remote_ciphers = list(filter(server_encrypt_algo_list.__contains__, - self._preferred_ciphers)) - if (len(agreed_local_ciphers) == 0) or (len(agreed_remote_ciphers) == 0): - raise SSHException('Incompatible ssh server (no acceptable ciphers)') + agreed_local_ciphers = list( + filter( + client_encrypt_algo_list.__contains__, + self.preferred_ciphers, + ) + ) + agreed_remote_ciphers = list( + filter( + server_encrypt_algo_list.__contains__, + self.preferred_ciphers, + ) + ) + if len(agreed_local_ciphers) == 0 or len(agreed_remote_ciphers) == 0: + raise IncompatiblePeer( + "Incompatible ssh server (no acceptable ciphers)" + ) # noqa self.local_cipher = agreed_local_ciphers[0] self.remote_cipher = agreed_remote_ciphers[0] self._log_agreement( - 'Cipher', local=self.local_cipher, remote=self.remote_cipher + "Cipher", local=self.local_cipher, remote=self.remote_cipher ) if self.server_mode: - agreed_remote_macs = list(filter(self._preferred_macs.__contains__, client_mac_algo_list)) - agreed_local_macs = list(filter(self._preferred_macs.__contains__, server_mac_algo_list)) + agreed_remote_macs = list( + filter(self.preferred_macs.__contains__, client_mac_algo_list) + ) + agreed_local_macs = list( + filter(self.preferred_macs.__contains__, server_mac_algo_list) + ) else: - agreed_local_macs = list(filter(client_mac_algo_list.__contains__, self._preferred_macs)) - agreed_remote_macs = list(filter(server_mac_algo_list.__contains__, self._preferred_macs)) + agreed_local_macs 
= list( + filter(client_mac_algo_list.__contains__, self.preferred_macs) + ) + agreed_remote_macs = list( + filter(server_mac_algo_list.__contains__, self.preferred_macs) + ) if (len(agreed_local_macs) == 0) or (len(agreed_remote_macs) == 0): - raise SSHException('Incompatible ssh server (no acceptable macs)') + raise IncompatiblePeer( + "Incompatible ssh server (no acceptable macs)" + ) self.local_mac = agreed_local_macs[0] self.remote_mac = agreed_remote_macs[0] self._log_agreement( - 'MAC', local=self.local_mac, remote=self.remote_mac + "MAC", local=self.local_mac, remote=self.remote_mac ) if self.server_mode: - agreed_remote_compression = list(filter(self._preferred_compression.__contains__, client_compress_algo_list)) - agreed_local_compression = list(filter(self._preferred_compression.__contains__, server_compress_algo_list)) + agreed_remote_compression = list( + filter( + self.preferred_compression.__contains__, + client_compress_algo_list, + ) + ) + agreed_local_compression = list( + filter( + self.preferred_compression.__contains__, + server_compress_algo_list, + ) + ) else: - agreed_local_compression = list(filter(client_compress_algo_list.__contains__, self._preferred_compression)) - agreed_remote_compression = list(filter(server_compress_algo_list.__contains__, self._preferred_compression)) - if (len(agreed_local_compression) == 0) or (len(agreed_remote_compression) == 0): - raise SSHException('Incompatible ssh server (no acceptable compression) %r %r %r' % (agreed_local_compression, agreed_remote_compression, self._preferred_compression)) + agreed_local_compression = list( + filter( + client_compress_algo_list.__contains__, + self.preferred_compression, + ) + ) + agreed_remote_compression = list( + filter( + server_compress_algo_list.__contains__, + self.preferred_compression, + ) + ) + if ( + len(agreed_local_compression) == 0 + or len(agreed_remote_compression) == 0 + ): + msg = "Incompatible ssh server (no acceptable compression)" + msg += " {!r} {!r} {!r}" + raise IncompatiblePeer( + msg.format( + agreed_local_compression, + agreed_remote_compression, + self.preferred_compression, + ) + ) self.local_compression = agreed_local_compression[0] self.remote_compression = agreed_remote_compression[0] self._log_agreement( - 'Compression', + "Compression", local=self.local_compression, - remote=self.remote_compression + remote=self.remote_compression, ) + self._log(DEBUG, "=== End of kex handshake ===") # save for computing hash later... # now wait! 
openssh has a bug (and others might too) where there are @@ -2066,75 +2596,127 @@ def _parse_kex_init(self, m): self.remote_kex_init = cMSG_KEXINIT + m.get_so_far() def _activate_inbound(self): - """switch on newly negotiated encryption parameters for inbound traffic""" - block_size = self._cipher_info[self.remote_cipher]['block-size'] + """switch on newly negotiated encryption parameters for + inbound traffic""" + block_size = self._cipher_info[self.remote_cipher]["block-size"] if self.server_mode: - IV_in = self._compute_key('A', block_size) - key_in = self._compute_key('C', self._cipher_info[self.remote_cipher]['key-size']) + IV_in = self._compute_key("A", block_size) + key_in = self._compute_key( + "C", self._cipher_info[self.remote_cipher]["key-size"] + ) else: - IV_in = self._compute_key('B', block_size) - key_in = self._compute_key('D', self._cipher_info[self.remote_cipher]['key-size']) - engine = self._get_cipher(self.remote_cipher, key_in, IV_in, self._DECRYPT) - mac_size = self._mac_info[self.remote_mac]['size'] - mac_engine = self._mac_info[self.remote_mac]['class'] + IV_in = self._compute_key("B", block_size) + key_in = self._compute_key( + "D", self._cipher_info[self.remote_cipher]["key-size"] + ) + engine = self._get_cipher( + self.remote_cipher, key_in, IV_in, self._DECRYPT + ) + etm = "etm@openssh.com" in self.remote_mac + mac_size = self._mac_info[self.remote_mac]["size"] + mac_engine = self._mac_info[self.remote_mac]["class"] # initial mac keys are done in the hash's natural size (not the # potentially truncated transmission size) if self.server_mode: - mac_key = self._compute_key('E', mac_engine().digest_size) + mac_key = self._compute_key("E", mac_engine().digest_size) else: - mac_key = self._compute_key('F', mac_engine().digest_size) - self.packetizer.set_inbound_cipher(engine, block_size, mac_engine, mac_size, mac_key) + mac_key = self._compute_key("F", mac_engine().digest_size) + self.packetizer.set_inbound_cipher( + engine, block_size, mac_engine, mac_size, mac_key, etm=etm + ) compress_in = self._compression_info[self.remote_compression][1] - if (compress_in is not None) and ((self.remote_compression != 'zlib@openssh.com') or self.authenticated): - self._log(DEBUG, 'Switching on inbound compression ...') + if compress_in is not None and ( + self.remote_compression != "zlib@openssh.com" or self.authenticated + ): + self._log(DEBUG, "Switching on inbound compression ...") self.packetizer.set_inbound_compressor(compress_in()) def _activate_outbound(self): - """switch on newly negotiated encryption parameters for outbound traffic""" + """switch on newly negotiated encryption parameters for + outbound traffic""" m = Message() m.add_byte(cMSG_NEWKEYS) self._send_message(m) - block_size = self._cipher_info[self.local_cipher]['block-size'] + block_size = self._cipher_info[self.local_cipher]["block-size"] if self.server_mode: - IV_out = self._compute_key('B', block_size) - key_out = self._compute_key('D', self._cipher_info[self.local_cipher]['key-size']) + IV_out = self._compute_key("B", block_size) + key_out = self._compute_key( + "D", self._cipher_info[self.local_cipher]["key-size"] + ) else: - IV_out = self._compute_key('A', block_size) - key_out = self._compute_key('C', self._cipher_info[self.local_cipher]['key-size']) - engine = self._get_cipher(self.local_cipher, key_out, IV_out, self._ENCRYPT) - mac_size = self._mac_info[self.local_mac]['size'] - mac_engine = self._mac_info[self.local_mac]['class'] + IV_out = self._compute_key("A", block_size) + key_out = 
self._compute_key( + "C", self._cipher_info[self.local_cipher]["key-size"] + ) + engine = self._get_cipher( + self.local_cipher, key_out, IV_out, self._ENCRYPT + ) + etm = "etm@openssh.com" in self.local_mac + mac_size = self._mac_info[self.local_mac]["size"] + mac_engine = self._mac_info[self.local_mac]["class"] # initial mac keys are done in the hash's natural size (not the # potentially truncated transmission size) if self.server_mode: - mac_key = self._compute_key('F', mac_engine().digest_size) + mac_key = self._compute_key("F", mac_engine().digest_size) else: - mac_key = self._compute_key('E', mac_engine().digest_size) - sdctr = self.local_cipher.endswith('-ctr') - self.packetizer.set_outbound_cipher(engine, block_size, mac_engine, mac_size, mac_key, sdctr) + mac_key = self._compute_key("E", mac_engine().digest_size) + sdctr = self.local_cipher.endswith("-ctr") + self.packetizer.set_outbound_cipher( + engine, block_size, mac_engine, mac_size, mac_key, sdctr, etm=etm + ) compress_out = self._compression_info[self.local_compression][0] - if (compress_out is not None) and ((self.local_compression != 'zlib@openssh.com') or self.authenticated): - self._log(DEBUG, 'Switching on outbound compression ...') + if compress_out is not None and ( + self.local_compression != "zlib@openssh.com" or self.authenticated + ): + self._log(DEBUG, "Switching on outbound compression ...") self.packetizer.set_outbound_compressor(compress_out()) if not self.packetizer.need_rekey(): self.in_kex = False + # If client indicated extension support, send that packet immediately + if ( + self.server_mode + and self.server_sig_algs + and self._remote_ext_info == "ext-info-c" + ): + extensions = {"server-sig-algs": ",".join(self.preferred_pubkeys)} + m = Message() + m.add_byte(cMSG_EXT_INFO) + m.add_int(len(extensions)) + for name, value in sorted(extensions.items()): + m.add_string(name) + m.add_string(value) + self._send_message(m) # we always expect to receive NEWKEYS now self._expect_packet(MSG_NEWKEYS) def _auth_trigger(self): self.authenticated = True # delayed initiation of compression - if self.local_compression == 'zlib@openssh.com': + if self.local_compression == "zlib@openssh.com": compress_out = self._compression_info[self.local_compression][0] - self._log(DEBUG, 'Switching on outbound compression ...') + self._log(DEBUG, "Switching on outbound compression ...") self.packetizer.set_outbound_compressor(compress_out()) - if self.remote_compression == 'zlib@openssh.com': + if self.remote_compression == "zlib@openssh.com": compress_in = self._compression_info[self.remote_compression][1] - self._log(DEBUG, 'Switching on inbound compression ...') + self._log(DEBUG, "Switching on inbound compression ...") self.packetizer.set_inbound_compressor(compress_in()) + def _parse_ext_info(self, msg): + # Packet is a count followed by that many key-string to possibly-bytes + # pairs. + extensions = {} + for _ in range(msg.get_int()): + name = msg.get_text() + value = msg.get_string() + extensions[name] = value + self._log(DEBUG, "Got EXT_INFO: {}".format(extensions)) + # NOTE: this should work ok in cases where a server sends /two/ such + # messages; the RFC explicitly states a 2nd one should overwrite the + # 1st. 
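# --- Editor's aside: illustrative sketch, not part of the diff --------------
# The cMSG_EXT_INFO send path and _parse_ext_info shown above exchange an
# RFC 8308 EXT_INFO payload: a uint32 extension count followed by
# (string name, string value) pairs, each string being a uint32 length prefix
# plus bytes (the leading message-type byte is excluded here, matching what
# _parse_ext_info sees). Below is a minimal standalone encoder/decoder of that
# layout for reference only; paramiko itself builds and reads it via its
# Message class, and the helper names encode_ext_info/decode_ext_info are
# hypothetical.
import struct

def encode_ext_info(extensions):
    # extensions: dict of str -> bytes,
    # e.g. {"server-sig-algs": b"rsa-sha2-256,rsa-sha2-512"}
    out = struct.pack(">I", len(extensions))
    for name, value in sorted(extensions.items()):
        name_b = name.encode("ascii")
        out += struct.pack(">I", len(name_b)) + name_b
        out += struct.pack(">I", len(value)) + value
    return out

def decode_ext_info(payload):
    # Inverse of encode_ext_info: count, then name/value string pairs.
    count = struct.unpack_from(">I", payload, 0)[0]
    offset, extensions = 4, {}
    for _ in range(count):
        (nlen,) = struct.unpack_from(">I", payload, offset)
        name = payload[offset + 4:offset + 4 + nlen].decode("ascii")
        offset += 4 + nlen
        (vlen,) = struct.unpack_from(">I", payload, offset)
        extensions[name] = payload[offset + 4:offset + 4 + vlen]
        offset += 4 + vlen
    return extensions
# ----------------------------------------------------------------------------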
+ self.server_extensions = extensions + def _parse_newkeys(self, m): - self._log(DEBUG, 'Switch to new keys ...') + self._log(DEBUG, "Switch to new keys ...") self._activate_inbound() # can also free a bunch of stuff here self.local_kex_init = self.remote_kex_init = None @@ -2162,22 +2744,25 @@ def _parse_newkeys(self, m): def _parse_disconnect(self, m): code = m.get_int() desc = m.get_text() - self._log(INFO, 'Disconnect (code %d): %s' % (code, desc)) + self._log(INFO, "Disconnect (code {:d}): {}".format(code, desc)) def _parse_global_request(self, m): kind = m.get_text() - self._log(DEBUG, 'Received global request "%s"' % kind) + self._log(DEBUG, 'Received global request "{}"'.format(kind)) want_reply = m.get_boolean() if not self.server_mode: - self._log(DEBUG, 'Rejecting "%s" global request from server.' % kind) + self._log( + DEBUG, + 'Rejecting "{}" global request from server.'.format(kind), + ) ok = False - elif kind == 'tcpip-forward': + elif kind == "tcpip-forward": address = m.get_text() port = m.get_int() ok = self.server_object.check_port_forward_request(address, port) if ok: ok = (ok,) - elif kind == 'cancel-tcpip-forward': + elif kind == "cancel-tcpip-forward": address = m.get_text() port = m.get_int() self.server_object.cancel_port_forward_request(address, port) @@ -2198,13 +2783,13 @@ def _parse_global_request(self, m): self._send_message(msg) def _parse_request_success(self, m): - self._log(DEBUG, 'Global request successful.') + self._log(DEBUG, "Global request successful.") self.global_response = m if self.completion_event is not None: self.completion_event.set() def _parse_request_failure(self, m): - self._log(DEBUG, 'Global request denied.') + self._log(DEBUG, "Global request denied.") self.global_response = None if self.completion_event is not None: self.completion_event.set() @@ -2216,12 +2801,14 @@ def _parse_channel_open_success(self, m): server_max_packet_size = m.get_int() chan = self._channels.get(chanid) if chan is None: - self._log(WARNING, 'Success for unrequested channel! [??]') + self._log(WARNING, "Success for unrequested channel! [??]") return self.lock.acquire() try: - chan._set_remote_channel(server_chanid, server_window_size, server_max_packet_size) - self._log(DEBUG, 'Secsh channel %d opened.' 
% chanid) + chan._set_remote_channel( + server_chanid, server_window_size, server_max_packet_size + ) + self._log(DEBUG, "Secsh channel {:d} opened.".format(chanid)) if chanid in self.channel_events: self.channel_events[chanid].set() del self.channel_events[chanid] @@ -2233,9 +2820,14 @@ def _parse_channel_open_failure(self, m): chanid = m.get_int() reason = m.get_int() reason_str = m.get_text() - lang = m.get_text() - reason_text = CONNECTION_FAILED_CODE.get(reason, '(unknown code)') - self._log(ERROR, 'Secsh channel %d open FAILED: %s: %s' % (chanid, reason_str, reason_text)) + m.get_text() # ignored language + reason_text = CONNECTION_FAILED_CODE.get(reason, "(unknown code)") + self._log( + ERROR, + "Secsh channel {:d} open FAILED: {}: {}".format( + chanid, reason_str, reason_text + ), + ) self.lock.acquire() try: self.saved_exception = ChannelException(reason, reason_text) @@ -2254,35 +2846,51 @@ def _parse_channel_open(self, m): initial_window_size = m.get_int() max_packet_size = m.get_int() reject = False - if (kind == 'auth-agent@openssh.com') and (self._forward_agent_handler is not None): - self._log(DEBUG, 'Incoming forward agent connection') + if ( + kind == "auth-agent@openssh.com" + and self._forward_agent_handler is not None + ): + self._log(DEBUG, "Incoming forward agent connection") self.lock.acquire() try: my_chanid = self._next_channel() finally: self.lock.release() - elif (kind == 'x11') and (self._x11_handler is not None): + elif (kind == "x11") and (self._x11_handler is not None): origin_addr = m.get_text() origin_port = m.get_int() - self._log(DEBUG, 'Incoming x11 connection from %s:%d' % (origin_addr, origin_port)) + self._log( + DEBUG, + "Incoming x11 connection from {}:{:d}".format( + origin_addr, origin_port + ), + ) self.lock.acquire() try: my_chanid = self._next_channel() finally: self.lock.release() - elif (kind == 'forwarded-tcpip') and (self._tcp_handler is not None): + elif (kind == "forwarded-tcpip") and (self._tcp_handler is not None): server_addr = m.get_text() server_port = m.get_int() origin_addr = m.get_text() origin_port = m.get_int() - self._log(DEBUG, 'Incoming tcp forwarded connection from %s:%d' % (origin_addr, origin_port)) + self._log( + DEBUG, + "Incoming tcp forwarded connection from {}:{:d}".format( + origin_addr, origin_port + ), + ) self.lock.acquire() try: my_chanid = self._next_channel() finally: self.lock.release() elif not self.server_mode: - self._log(DEBUG, 'Rejecting "%s" channel request from server.' % kind) + self._log( + DEBUG, + 'Rejecting "{}" channel request from server.'.format(kind), + ) reject = True reason = OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED else: @@ -2291,26 +2899,34 @@ def _parse_channel_open(self, m): my_chanid = self._next_channel() finally: self.lock.release() - if kind == 'direct-tcpip': - # handle direct-tcpip requests comming from the client + if kind == "direct-tcpip": + # handle direct-tcpip requests coming from the client dest_addr = m.get_text() dest_port = m.get_int() origin_addr = m.get_text() origin_port = m.get_int() reason = self.server_object.check_channel_direct_tcpip_request( - my_chanid, (origin_addr, origin_port), (dest_addr, dest_port)) + my_chanid, + (origin_addr, origin_port), + (dest_addr, dest_port), + ) else: - reason = self.server_object.check_channel_request(kind, my_chanid) + reason = self.server_object.check_channel_request( + kind, my_chanid + ) if reason != OPEN_SUCCEEDED: - self._log(DEBUG, 'Rejecting "%s" channel request from client.' 
% kind) + self._log( + DEBUG, + 'Rejecting "{}" channel request from client.'.format(kind), + ) reject = True if reject: msg = Message() msg.add_byte(cMSG_CHANNEL_OPEN_FAILURE) msg.add_int(chanid) msg.add_int(reason) - msg.add_string('') - msg.add_string('en') + msg.add_string("") + msg.add_string("en") self._send_message(msg) return @@ -2320,8 +2936,12 @@ def _parse_channel_open(self, m): self._channels.put(my_chanid, chan) self.channels_seen[my_chanid] = True chan._set_transport(self) - chan._set_window(self.default_window_size, self.default_max_packet_size) - chan._set_remote_channel(chanid, initial_window_size, max_packet_size) + chan._set_window( + self.default_window_size, self.default_max_packet_size + ) + chan._set_remote_channel( + chanid, initial_window_size, max_packet_size + ) finally: self.lock.release() m = Message() @@ -2331,22 +2951,26 @@ def _parse_channel_open(self, m): m.add_int(self.default_window_size) m.add_int(self.default_max_packet_size) self._send_message(m) - self._log(DEBUG, 'Secsh channel %d (%s) opened.', my_chanid, kind) - if kind == 'auth-agent@openssh.com': + self._log( + DEBUG, "Secsh channel {:d} ({}) opened.".format(my_chanid, kind) + ) + if kind == "auth-agent@openssh.com": self._forward_agent_handler(chan) - elif kind == 'x11': + elif kind == "x11": self._x11_handler(chan, (origin_addr, origin_port)) - elif kind == 'forwarded-tcpip': + elif kind == "forwarded-tcpip": chan.origin_addr = (origin_addr, origin_port) - self._tcp_handler(chan, (origin_addr, origin_port), (server_addr, server_port)) + self._tcp_handler( + chan, (origin_addr, origin_port), (server_addr, server_port) + ) else: self._queue_incoming_channel(chan) def _parse_debug(self, m): - always_display = m.get_boolean() + m.get_boolean() # always_display msg = m.get_string() - lang = m.get_string() - self._log(DEBUG, 'Debug msg: {0}'.format(util.safe_string(msg))) + m.get_string() # language + self._log(DEBUG, "Debug msg: {}".format(util.safe_string(msg))) def _get_subsystem_handler(self, name): try: @@ -2358,6 +2982,7 @@ def _get_subsystem_handler(self, name): self.lock.release() _handler_table = { + MSG_EXT_INFO: _parse_ext_info, MSG_NEWKEYS: _parse_newkeys, MSG_GLOBAL_REQUEST: _parse_global_request, MSG_REQUEST_SUCCESS: _parse_request_success, @@ -2380,7 +3005,10 @@ def _get_subsystem_handler(self, name): } -class SecurityOptions (object): +# TODO 3.0: drop this, we barely use it ourselves, it badly replicates the +# Transport-internal algorithm management, AND does so in a way which doesn't +# honor newer things like disabled_algorithms! +class SecurityOptions(object): """ Simple object containing the security preferences of an ssh transport. These are tuples of acceptable ciphers, digests, key types, and key @@ -2392,8 +3020,8 @@ class SecurityOptions (object): ``ValueError`` will be raised. If you try to assign something besides a tuple to one of the fields, ``TypeError`` will be raised. """ - #__slots__ = [ 'ciphers', 'digests', 'key_types', 'kex', 'compression', '_transport' ] - __slots__ = '_transport' + + __slots__ = "_transport" def __init__(self, transport): self._transport = transport @@ -2402,17 +3030,17 @@ def __repr__(self): """ Returns a string representation of this object, for debugging. 
""" - return '' % repr(self._transport) + return "".format(self._transport) def _set(self, name, orig, x): if type(x) is list: x = tuple(x) if type(x) is not tuple: - raise TypeError('expected tuple or list') + raise TypeError("expected tuple or list") possible = list(getattr(self._transport, orig).keys()) forbidden = [n for n in x if n not in possible] if len(forbidden) > 0: - raise ValueError('unknown cipher') + raise ValueError("unknown cipher") setattr(self._transport, name, x) @property @@ -2422,7 +3050,7 @@ def ciphers(self): @ciphers.setter def ciphers(self, x): - self._set('_preferred_ciphers', '_cipher_info', x) + self._set("_preferred_ciphers", "_cipher_info", x) @property def digests(self): @@ -2431,7 +3059,7 @@ def digests(self): @digests.setter def digests(self, x): - self._set('_preferred_macs', '_mac_info', x) + self._set("_preferred_macs", "_mac_info", x) @property def key_types(self): @@ -2440,8 +3068,7 @@ def key_types(self): @key_types.setter def key_types(self, x): - self._set('_preferred_keys', '_key_info', x) - + self._set("_preferred_keys", "_key_info", x) @property def kex(self): @@ -2450,7 +3077,7 @@ def kex(self): @kex.setter def kex(self, x): - self._set('_preferred_kex', '_kex_info', x) + self._set("_preferred_kex", "_kex_info", x) @property def compression(self): @@ -2459,10 +3086,10 @@ def compression(self): @compression.setter def compression(self, x): - self._set('_preferred_compression', '_compression_info', x) + self._set("_preferred_compression", "_compression_info", x) -class ChannelMap (object): +class ChannelMap(object): def __init__(self): # (id -> Channel) self._map = weakref.WeakValueDictionary() diff --git a/paramiko/util.py b/paramiko/util.py index 5ad1e3fd9..939702898 100644 --- a/paramiko/util.py +++ b/paramiko/util.py @@ -35,7 +35,8 @@ def inflate_long(s, always_positive=False): - """turns a normalized byte string into a long-int (adapted from Crypto.Util.number)""" + """turns a normalized byte string into a long-int + (adapted from Crypto.Util.number)""" out = long(0) negative = 0 if not always_positive and (len(s) > 0) and (byte_ord(s[0]) >= 0x80): @@ -48,22 +49,24 @@ def inflate_long(s, always_positive=False): # noinspection PyAugmentAssignment s = filler * (4 - len(s) % 4) + s for i in range(0, len(s), 4): - out = (out << 32) + struct.unpack('>I', s[i:i+4])[0] + out = (out << 32) + struct.unpack(">I", s[i : i + 4])[0] if negative: - out -= (long(1) << (8 * len(s))) + out -= long(1) << (8 * len(s)) return out + deflate_zero = zero_byte if PY2 else 0 deflate_ff = max_byte if PY2 else 0xff def deflate_long(n, add_sign_padding=True): - """turns a long-int into a normalized byte string (adapted from Crypto.Util.number)""" + """turns a long-int into a normalized byte string + (adapted from Crypto.Util.number)""" # after much testing, this algorithm was deemed to be the fastest s = bytes() n = long(n) while (n != 0) and (n != -1): - s = struct.pack('>I', n & xffffffff) + s + s = struct.pack(">I", n & xffffffff) + s n >>= 32 # strip off leading zeros, FFs for i in enumerate(s): @@ -78,7 +81,7 @@ def deflate_long(n, add_sign_padding=True): s = zero_byte else: s = max_byte - s = s[i[0]:] + s = s[i[0] :] if add_sign_padding: if (n == 0) and (byte_ord(s[0]) >= 0x80): s = zero_byte + s @@ -87,31 +90,33 @@ def deflate_long(n, add_sign_padding=True): return s -def format_binary(data, prefix=''): +def format_binary(data, prefix=""): x = 0 out = [] while len(data) > x + 16: - out.append(format_binary_line(data[x:x+16])) + 
out.append(format_binary_line(data[x : x + 16])) x += 16 if x < len(data): out.append(format_binary_line(data[x:])) - return [prefix + x for x in out] + return [prefix + line for line in out] def format_binary_line(data): - left = ' '.join(['%02X' % byte_ord(c) for c in data]) - right = ''.join([('.%c..' % c)[(byte_ord(c)+63)//95] for c in data]) - return '%-50s %s' % (left, right) + left = " ".join(["{:02X}".format(byte_ord(c)) for c in data]) + right = "".join( + [".{:c}..".format(byte_ord(c))[(byte_ord(c) + 63) // 95] for c in data] + ) + return "{:50s} {}".format(left, right) def safe_string(s): - out = b('') + out = b"" for c in s: i = byte_ord(c) if 32 <= i <= 127: out += byte_chr(i) else: - out += b('%%%02X' % i) + out += b("%{:02X}".format(i)) return out @@ -131,7 +136,7 @@ def bit_length(n): def tb_strings(): - return ''.join(traceback.format_exception(*sys.exc_info())).split('\n') + return "".join(traceback.format_exception(*sys.exc_info())).split("\n") def generate_key_bytes(hash_alg, salt, key, nbytes): @@ -182,12 +187,16 @@ def load_host_keys(filename): nested dict of `.PKey` objects, indexed by hostname and then keytype """ from paramiko.hostkeys import HostKeys + return HostKeys(filename) def parse_ssh_config(file_obj): """ Provided only as a backward-compatible wrapper around `.SSHConfig`. + + .. deprecated:: 2.7 + Use `SSHConfig.from_file` instead. """ config = SSHConfig() config.parse(file_obj) @@ -215,6 +224,7 @@ def mod_inverse(x, m): u2 += m return u2 + _g_thread_ids = {} _g_thread_counter = 0 _g_thread_lock = threading.Lock() @@ -236,30 +246,34 @@ def get_thread_id(): def log_to_file(filename, level=DEBUG): - """send paramiko logs to a logfile, if they're not already going somewhere""" - l = logging.getLogger("paramiko") - if len(l.handlers) > 0: + """send paramiko logs to a logfile, + if they're not already going somewhere""" + logger = logging.getLogger("paramiko") + if len(logger.handlers) > 0: return - l.setLevel(level) - f = open(filename, 'w') - lh = logging.StreamHandler(f) - lh.setFormatter(logging.Formatter('%(levelname)-.3s [%(asctime)s.%(msecs)03d] thr=%(_threadid)-3d %(name)s: %(message)s', - '%Y%m%d-%H:%M:%S')) - l.addHandler(lh) + logger.setLevel(level) + f = open(filename, "a") + handler = logging.StreamHandler(f) + frm = "%(levelname)-.3s [%(asctime)s.%(msecs)03d] thr=%(_threadid)-3d" + frm += " %(name)s: %(message)s" + handler.setFormatter(logging.Formatter(frm, "%Y%m%d-%H:%M:%S")) + logger.addHandler(handler) # make only one filter object, so it doesn't get applied more than once -class PFilter (object): +class PFilter(object): def filter(self, record): record._threadid = get_thread_id() return True + + _pfilter = PFilter() def get_logger(name): - l = logging.getLogger(name) - l.addFilter(_pfilter) - return l + logger = logging.getLogger(name) + logger.addFilter(_pfilter) + return logger def retry_on_signal(function): @@ -277,7 +291,7 @@ def constant_time_bytes_eq(a, b): return False res = 0 # noinspection PyUnresolvedReferences - for i in (xrange if PY2 else range)(len(a)): + for i in (xrange if PY2 else range)(len(a)): # noqa: F821 res |= byte_ord(a[i]) ^ byte_ord(b[i]) return res == 0 diff --git a/paramiko/win_openssh.py b/paramiko/win_openssh.py new file mode 100644 index 000000000..ece7c8fd7 --- /dev/null +++ b/paramiko/win_openssh.py @@ -0,0 +1,40 @@ +# Copyright (C) 2021 Lew Gordon +# Copyright (C) 2022 Patrick Spendrin +# +# This file is part of paramiko. 
+# +# Paramiko is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation; either version 2.1 of the License, or (at your option) +# any later version. +# +# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Paramiko; if not, write to the Free Software Foundation, Inc., +# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. + +import os.path + +PIPE_NAME = r"\\.\pipe\openssh-ssh-agent" + + +def can_talk_to_agent(): + return os.path.exists(PIPE_NAME) + + +class OpenSSHAgentConnection: + def __init__(self): + self._pipe = open(PIPE_NAME, "rb+", buffering=0) + + def send(self, data): + return self._pipe.write(data) + + def recv(self, n): + return self._pipe.read(n) + + def close(self): + return self._pipe.close() diff --git a/paramiko/win_pageant.py b/paramiko/win_pageant.py index 4b482beec..a550b7f33 100644 --- a/paramiko/win_pageant.py +++ b/paramiko/win_pageant.py @@ -25,13 +25,13 @@ import ctypes.wintypes import platform import struct -from paramiko.util import * +from paramiko.common import zero_byte from paramiko.py3compat import b try: - import _thread as thread # Python 3.x + import _thread as thread # Python 3.x except ImportError: - import thread # Python 2.5-2.7 + import thread # Python 2.5-2.7 from . import _winapi @@ -44,7 +44,7 @@ def _get_pageant_window_object(): - return ctypes.windll.user32.FindWindowA(b('Pageant'), b('Pageant')) + return ctypes.windll.user32.FindWindowA(b"Pageant", b"Pageant") def can_talk_to_agent(): @@ -57,7 +57,10 @@ def can_talk_to_agent(): return bool(_get_pageant_window_object()) -ULONG_PTR = ctypes.c_uint64 if platform.architecture()[0] == '64bit' else ctypes.c_uint32 +if platform.architecture()[0] == "64bit": + ULONG_PTR = ctypes.c_uint64 +else: + ULONG_PTR = ctypes.c_uint32 class COPYDATASTRUCT(ctypes.Structure): @@ -65,10 +68,11 @@ class COPYDATASTRUCT(ctypes.Structure): ctypes implementation of http://msdn.microsoft.com/en-us/library/windows/desktop/ms649010%28v=vs.85%29.aspx """ + _fields_ = [ - ('num_data', ULONG_PTR), - ('data_size', ctypes.wintypes.DWORD), - ('data_loc', ctypes.c_void_p), + ("num_data", ULONG_PTR), + ("data_size", ctypes.wintypes.DWORD), + ("data_loc", ctypes.c_void_p), ] @@ -83,27 +87,29 @@ def _query_pageant(msg): return None # create a name for the mmap - map_name = 'PageantRequest%08x' % thread.get_ident() + map_name = "PageantRequest%08x" % thread.get_ident() - pymap = _winapi.MemoryMap(map_name, _AGENT_MAX_MSGLEN, - _winapi.get_security_attributes_for_user(), - ) + pymap = _winapi.MemoryMap( + map_name, _AGENT_MAX_MSGLEN, _winapi.get_security_attributes_for_user() + ) with pymap: pymap.write(msg) # Create an array buffer containing the mapped filename - char_buffer = array.array("b", b(map_name) + zero_byte) + char_buffer = array.array("b", b(map_name) + zero_byte) # noqa char_buffer_address, char_buffer_size = char_buffer.buffer_info() # Create a string to use for the SendMessage function call - cds = COPYDATASTRUCT(_AGENT_COPYDATA_ID, char_buffer_size, - char_buffer_address) + cds = COPYDATASTRUCT( + _AGENT_COPYDATA_ID, char_buffer_size, char_buffer_address + ) - response = ctypes.windll.user32.SendMessageA(hwnd, - 
win32con_WM_COPYDATA, ctypes.sizeof(cds), ctypes.byref(cds)) + response = ctypes.windll.user32.SendMessageA( + hwnd, win32con_WM_COPYDATA, ctypes.sizeof(cds), ctypes.byref(cds) + ) if response > 0: pymap.seek(0) datalen = pymap.read(4) - retlen = struct.unpack('>I', datalen)[0] + retlen = struct.unpack(">I", datalen)[0] return datalen + pymap.read(retlen) return None @@ -124,10 +130,10 @@ def send(self, data): def recv(self, n): if self._response is None: - return '' + return "" ret = self._response[:n] self._response = self._response[n:] - if self._response == '': + if self._response == "": self._response = None return ret diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 000000000..be207cd86 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,10 @@ +[pytest] +# We use pytest-relaxed just for its utils at the moment, so disable it at the +# plugin level until we adapt test organization to really use it. +addopts = -p no:relaxed +# Loop on failure +looponfailroots = tests paramiko +# Ignore some warnings we cannot easily handle. +filterwarnings = + ignore::DeprecationWarning:pkg_resources + ignore::cryptography.utils.CryptographyDeprecationWarning diff --git a/setup.cfg b/setup.cfg index 1a8f89dec..59b47d603 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,8 @@ [wheel] universal = 1 +[metadata] +license_file = LICENSE + [coverage:run] omit = paramiko/_winapi.py diff --git a/setup.py b/setup.py index ed3bbcd60..c9de01540 100644 --- a/setup.py +++ b/setup.py @@ -16,67 +16,86 @@ # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Suite 500, Boston, MA 02110-1335 USA. - -longdesc = ''' -This is a library for making SSH2 connections (client or server). -Emphasis is on using SSH2 as an alternative to SSL for making secure -connections between python scripts. All major ciphers and hash methods -are supported. SFTP client and server mode are both supported too. - -Required packages: - Cryptography - -To install the `in-development version -`_, use -`pip install paramiko==dev`. -''' - import sys from setuptools import setup - -if sys.platform == 'darwin': +if sys.platform == "darwin": import setup_helper + setup_helper.install_custom_make_tarball() +long_description = open("README.rst").read() # Version info -- read without importing _locals = {} -with open('paramiko/_version.py') as fp: +with open("paramiko/_version.py") as fp: exec(fp.read(), None, _locals) -version = _locals['__version__'] +version = _locals["__version__"] +# Have to build extras_require dynamically because it doesn't allow +# self-referencing and I hate repeating myself. 
+extras_require = {
+    "gssapi": [
+        "pyasn1>=0.1.7",
+        'gssapi>=1.4.1;platform_system!="Windows"',
+        'pywin32>=2.1.8;platform_system=="Windows"',
+    ],
+    "ed25519": ["pynacl>=1.0.1", "bcrypt>=3.1.3"],
+    "invoke": ["invoke>=1.3"],
+}
+everything = []
+for subdeps in extras_require.values():
+    everything.extend(subdeps)
+extras_require["all"] = everything
 
 setup(
-    name = "paramiko",
-    version = version,
-    description = "SSH2 protocol library",
-    long_description = longdesc,
-    author = "Jeff Forcier",
-    author_email = "jeff@bitprophet.org",
-    url = "https://github.com/paramiko/paramiko/",
-    packages = [ 'paramiko' ],
-    license = 'LGPL',
-    platforms = 'Posix; MacOS X; Windows',
-    classifiers = [
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
-        'Operating System :: OS Independent',
-        'Topic :: Internet',
-        'Topic :: Security :: Cryptography',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.6',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.2',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
+    name="paramiko",
+    version=version,
+    description="SSH2 protocol library",
+    long_description=long_description,
+    author="Jeff Forcier",
+    author_email="jeff@bitprophet.org",
+    url="https://paramiko.org",
+    project_urls={
+        "Docs": "https://docs.paramiko.org",
+        "Source": "https://github.com/paramiko/paramiko",
+        "Issues": "https://github.com/paramiko/paramiko/issues",
+        "Changelog": "https://www.paramiko.org/changelog.html",
+        "CI": "https://app.circleci.com/pipelines/github/paramiko/paramiko",
+    },
+    packages=["paramiko"],
+    license="LGPL",
+    platforms="Posix; MacOS X; Windows",
+    classifiers=[
+        "Development Status :: 5 - Production/Stable",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: "
+        "GNU Library or Lesser General Public License (LGPL)",
+        "Operating System :: OS Independent",
+        "Topic :: Internet",
+        "Topic :: Security :: Cryptography",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 2",
+        "Programming Language :: Python :: 2.7",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.4",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
     ],
+    # TODO 3.0: remove bcrypt, pynacl and update installation docs noting that
+    # use of the extras_require ("paramiko[ed25519]") is now required for those
+    # TODO 3.0: alternately, given how prevalent ed25519 is now, and how we use
+    # invoke for (increasing) subproc stuff, consider making the default flavor
+    # "full" and adding a "minimal" or similar that is just-crypto?
+ # TODO 3.0: remove six, obviously install_requires=[ - 'cryptography>=1.1', - 'pyasn1>=0.1.7', + "bcrypt>=3.1.3", + "cryptography>=2.5", + "pynacl>=1.0.1", + "six", ], + extras_require=extras_require, ) diff --git a/setup_helper.py b/setup_helper.py index 9e3834b37..d0a8700ea 100644 --- a/setup_helper.py +++ b/setup_helper.py @@ -40,6 +40,7 @@ except ImportError: getgrnam = None + def _get_gid(name): """Returns a gid, given a group name.""" if getgrnam is None or name is None: @@ -52,6 +53,7 @@ def _get_gid(name): return result[2] return None + def _get_uid(name): """Returns an uid, given a user name.""" if getpwnam is None or name is None: @@ -64,8 +66,16 @@ def _get_uid(name): return result[2] return None -def make_tarball(base_name, base_dir, compress='gzip', verbose=0, dry_run=0, - owner=None, group=None): + +def make_tarball( + base_name, + base_dir, + compress="gzip", + verbose=0, + dry_run=0, + owner=None, + group=None, +): """Create a tar file from all the files under 'base_dir'. This file may be compressed. @@ -77,7 +87,7 @@ def make_tarball(base_name, base_dir, compress='gzip', verbose=0, dry_run=0, For 'gzip' and 'bzip2' the internal tarfile module will be used. For 'compress' the .tar will be created using tarfile, and then we will spawn 'compress' afterwards. - The output tar file will be named 'base_name' + ".tar", + The output tar file will be named 'base_name' + ".tar", possibly plus the appropriate compression extension (".gz", ".bz2" or ".Z"). Return the output filename. """ @@ -87,26 +97,26 @@ def make_tarball(base_name, base_dir, compress='gzip', verbose=0, dry_run=0, # "create a tree of hardlinks" step! (Would also be nice to # detect GNU tar to use its 'z' option and save a step.) - compress_ext = { 'gzip': ".gz", - 'bzip2': '.bz2', - 'compress': ".Z" } + compress_ext = {"gzip": ".gz", "bzip2": ".bz2", "compress": ".Z"} # flags for compression program, each element of list will be an argument - tarfile_compress_flag = {'gzip':'gz', 'bzip2':'bz2'} - compress_flags = {'compress': ["-f"]} + tarfile_compress_flag = {"gzip": "gz", "bzip2": "bz2"} + compress_flags = {"compress": ["-f"]} if compress is not None and compress not in compress_ext.keys(): - raise ValueError("bad value for 'compress': must be None, 'gzip'," - "'bzip2' or 'compress'") + raise ValueError( + "bad value for 'compress': must be None, 'gzip'," + "'bzip2' or 'compress'" + ) archive_name = base_name + ".tar" if compress and compress in tarfile_compress_flag: archive_name += compress_ext[compress] - mode = 'w:' + tarfile_compress_flag.get(compress, '') + mode = "w:" + tarfile_compress_flag.get(compress, "") mkpath(os.path.dirname(archive_name), dry_run=dry_run) - log.info('Creating tar file %s with mode %s' % (archive_name, mode)) + log.info("Creating tar file %s with mode %s" % (archive_name, mode)) uid = _get_uid(owner) gid = _get_gid(group) @@ -134,21 +144,22 @@ def _set_uid_gid(tarinfo): tar.close() if compress and compress not in tarfile_compress_flag: - spawn([compress] + compress_flags[compress] + [archive_name], - dry_run=dry_run) + spawn( + [compress] + compress_flags[compress] + [archive_name], + dry_run=dry_run, + ) return archive_name + compress_ext[compress] else: return archive_name _custom_formats = { - 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), - 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), - 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), - 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), 
+ "gztar": (make_tarball, [("compress", "gzip")], "gzip'ed tar-file"), + "bztar": (make_tarball, [("compress", "bzip2")], "bzip2'ed tar-file"), + "ztar": (make_tarball, [("compress", "compress")], "compressed tar file"), + "tar": (make_tarball, [("compress", None)], "uncompressed tar file"), } # Hack in and insert ourselves into the distutils code base def install_custom_make_tarball(): distutils.archive_util.ARCHIVE_FORMATS.update(_custom_formats) - diff --git a/sites/docs/api/config.rst b/sites/docs/api/config.rst index afb004c9a..d42de8ac0 100644 --- a/sites/docs/api/config.rst +++ b/sites/docs/api/config.rst @@ -1,5 +1,131 @@ +============= Configuration ============= +Paramiko **does not itself** leverage `OpenSSH-style config file directives +`_, but it **does** implement a parser for the format, which users +can honor themselves (and is used by higher-level libraries, such as +`Fabric`_). + +The API for this is `.SSHConfig`, which loads SSH config files from disk, +file-like object, or string and exposes a "look up a hostname, get a dict of +applicable keywords/values back" functionality. + +As with OpenSSH's own support, this dict will contain values from across the +parsed file, depending on the order in which keywords were encountered and how +specific or generic the ``Host`` or ``Match`` directives were. + +.. note:; + Result keys are lowercased for consistency and ease of deduping, as the + overall parsing/matching is itself case-insensitive. Thus, a source file + containing e.g. ``ProxyCommand`` will result in lookup results like + ``{"proxycommand": "shell command here"}``. + + +.. _ssh-config-support: + +Keywords currently supported +============================ + +The following is an alphabetical list of which `ssh_config`_ directives +Paramiko interprets during the parse/lookup process (as above, actual SSH +connections **do not** reference parsed configs). Departures from `OpenSSH's +implementation `_ (e.g. to support backwards compat with older +Paramiko releases) are included. A keyword by itself means no known departures. + +- ``AddressFamily``: used when looking up the local hostname for purposes of + expanding the ``%l``/``%L`` :ref:`tokens ` (this is actually a minor + value-add on top of OpenSSH, which doesn't actually honor this setting when + expanding ``%l``). +- ``CanonicalDomains`` + + .. versionadded:: 2.7 + +- ``CanonicalizeFallbackLocal``: when ``no``, triggers raising of + `.CouldNotCanonicalize` for target hostnames which do not successfully + canonicalize. + + .. versionadded:: 2.7 + +- ``CanonicalizeHostname``: along with the other ``Canonicaliz*`` settings + (sans ``CanonicalizePermittedCNAMEs``, which is not yet implemented), enables + hostname canonicalization, insofar as calling `.SSHConfig.lookup` with a + given hostname will return a canonicalized copy of the config data, including + an updated ``HostName`` value. + + .. versionadded:: 2.7 + +- ``CanonicalizeMaxDots`` + + .. versionadded:: 2.7 + +- ``Host`` +- ``HostName``: used in ``%h`` :ref:`token expansion ` +- ``Match``: fully supported, with the following caveats: + + - You must have the optional dependency Invoke installed; see :ref:`the + installation docs ` (in brief: install + ``paramiko[invoke]`` or ``paramiko[all]``). + - As usual, connection-time information is not present during config + lookup, and thus cannot be used to determine matching. This primarily + impacts ``Match user``, which can match against loaded ``User`` values + but has no knowledge about connection-time usernames. 
+ + .. versionadded:: 2.7 + +- ``Port``: supplies potential values for ``%p`` :ref:`token expansion + `. +- ``ProxyCommand``: see our `.ProxyCommand` class for an easy + way to honor this keyword from a config you've parsed. + + - Honors :ref:`token expansion `. + - When a lookup would result in an effective ``ProxyCommand none``, + Paramiko (as of 1.x-2.x) strips it from the resulting dict entirely. A + later major version may retain the ``"none"`` marker for clarity's sake. + +- ``User``: supplies potential values for ``%u`` :ref:`token expansion + `. + +.. _TOKENS: + +Expansion tokens +---------------- + +We support most SSH config expansion tokens where possible, so when they are +present in a config file source, the result of a `.SSHConfig.lookup` will +contain the expansions/substitutions (based on the rest of the config or +properties of the local system). + +Specifically, we are known to support the below, where applicable (e.g. as in +OpenSSH, ``%L`` works in ``ControlPath`` but not elsewhere): + +- ``%C`` +- ``%d`` +- ``%h`` +- ``%l`` +- ``%L`` +- ``%n`` +- ``%p`` +- ``%r`` +- ``%u``: substitutes the configured ``User`` value, or the local user (as seen + by ``getpass.getuser``) if not specified. + +In addition, we extend OpenSSH's tokens as follows: + +- ``~`` is treated like ``%d`` (expands to the local user's home directory + path) when expanding ``ProxyCommand`` values, since ``ProxyCommand`` does not + natively support ``%d`` for some reason. + + +.. _ssh_config: https://man.openbsd.org/ssh_config +.. _Fabric: http://fabfile.org + + +``config`` module API documentation +=================================== + +Mostly of interest to contributors; see previous section for behavioral +details. + .. automodule:: paramiko.config :member-order: bysource diff --git a/sites/docs/api/keys.rst b/sites/docs/api/keys.rst index c6412f77d..a456f502d 100644 --- a/sites/docs/api/keys.rst +++ b/sites/docs/api/keys.rst @@ -21,3 +21,8 @@ ECDSA ===== .. automodule:: paramiko.ecdsakey + +Ed25519 +======= + +.. automodule:: paramiko.ed25519key diff --git a/sites/docs/api/ssh_gss.rst b/sites/docs/api/ssh_gss.rst index 7a687e11d..155fcfffd 100644 --- a/sites/docs/api/ssh_gss.rst +++ b/sites/docs/api/ssh_gss.rst @@ -7,7 +7,10 @@ GSS-API authentication .. autoclass:: _SSH_GSSAuth :member-order: bysource -.. autoclass:: _SSH_GSSAPI +.. autoclass:: _SSH_GSSAPI_OLD + :member-order: bysource + +.. autoclass:: _SSH_GSSAPI_NEW :member-order: bysource .. autoclass:: _SSH_SSPI diff --git a/sites/docs/conf.py b/sites/docs/conf.py index 5674fed1c..4805a03cc 100644 --- a/sites/docs/conf.py +++ b/sites/docs/conf.py @@ -1,16 +1,25 @@ # Obtain shared config values import os, sys -sys.path.append(os.path.abspath('..')) -sys.path.append(os.path.abspath('../..')) +from os.path import abspath, join, dirname + +sys.path.append(abspath("..")) +sys.path.append(abspath("../..")) from shared_conf import * # Enable autodoc, intersphinx -extensions.extend(['sphinx.ext.autodoc']) +extensions.extend(["sphinx.ext.autodoc"]) # Autodoc settings -autodoc_default_flags = ['members', 'special-members'] +autodoc_default_flags = ["members", "special-members"] + +# Default is 'local' building, but reference the public www site when building +# under RTD. 
+target = join(dirname(__file__), "..", "www", "_build") +if os.environ.get("READTHEDOCS") == "True": + target = "http://paramiko.org" +intersphinx_mapping["www"] = (target, None) # Sister-site links to WWW -html_theme_options['extra_nav_links'] = { - "Main website": 'http://www.paramiko.org', +html_theme_options["extra_nav_links"] = { + "Main website": "http://www.paramiko.org" } diff --git a/sites/shared_conf.py b/sites/shared_conf.py index 99fab315d..c406054c8 100644 --- a/sites/shared_conf.py +++ b/sites/shared_conf.py @@ -5,36 +5,30 @@ # Alabaster theme + mini-extension html_theme_path = [alabaster.get_path()] -extensions = ['alabaster', 'sphinx.ext.intersphinx'] +extensions = ["alabaster", "sphinx.ext.intersphinx"] # Paths relative to invoking conf.py - not this shared file -html_theme = 'alabaster' +html_theme = "alabaster" html_theme_options = { - 'description': "A Python implementation of SSHv2.", - 'github_user': 'paramiko', - 'github_repo': 'paramiko', - 'analytics_id': 'UA-18486793-2', - 'travis_button': True, + "description": "A Python implementation of SSHv2.", + "github_user": "paramiko", + "github_repo": "paramiko", + "analytics_id": "UA-18486793-2", + "travis_button": False, + "tidelift_url": "https://tidelift.com/subscription/pkg/pypi-paramiko?utm_source=pypi-paramiko&utm_medium=referral&utm_campaign=docs", } html_sidebars = { - '**': [ - 'about.html', - 'navigation.html', - 'searchbox.html', - 'donate.html', - ] + "**": ["about.html", "navigation.html", "searchbox.html", "donate.html"] } # Everything intersphinx's to Python -intersphinx_mapping = { - 'python': ('http://docs.python.org/2.6', None), -} +intersphinx_mapping = {"python": ("https://docs.python.org/2.7/", None)} # Regular settings -project = 'Paramiko' +project = "Paramiko" year = datetime.now().year -copyright = '%d Jeff Forcier' % year -master_doc = 'index' -templates_path = ['_templates'] -exclude_trees = ['_build'] -source_suffix = '.rst' -default_role = 'obj' +copyright = "{} Jeff Forcier".format(year) +master_doc = "index" +templates_path = ["_templates"] +exclude_trees = ["_build"] +source_suffix = ".rst" +default_role = "obj" diff --git a/sites/www/changelog.rst b/sites/www/changelog.rst index c7149d9a8..45bacd225 100644 --- a/sites/www/changelog.rst +++ b/sites/www/changelog.rst @@ -2,55 +2,831 @@ Changelog ========= -* :release:`2.0.2 <2016-07-25>` -* :release:`1.17.2 <2016-07-25>` -* :release:`1.16.3 <2016-07-25>` -* :bug:`673 (1.16+)` (via :issue:`681`) Fix protocol banner read errors +- :release:`2.10.1 <2022-03-11>` +- :bug:`-` (`CVE-2022-24302 + `_) Creation + of new private key files using `~paramiko.pkey.PKey` subclasses was subject + to a race condition between file creation & mode modification, which could be + exploited by an attacker with knowledge of where the Paramiko-using code + would write out such files. + + This has been patched by using `os.open` and `os.fdopen` to ensure new files + are opened with the correct mode immediately. We've left the subsequent + explicit ``chmod`` in place to minimize any possible disruption, though it + may get removed in future backwards-incompatible updates. + + Thanks to Jan Schejbal for the report & feedback on the solution, and to + Jeremy Katz at Tidelift for coordinating the disclosure. +- :release:`2.10.0 <2022-03-11>` +- :feature:`1976` Add support for the ``%C`` token when parsing SSH config + files. Foundational PR submitted by ``@jbrand42``. 
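A minimal sketch of the create-with-restrictive-mode pattern that the 2.10.1 entry above describes; the helper name and path handling here are illustrative, not Paramiko's actual implementation::

    import os

    def write_key_material(path, data):
        # Create the file already restricted to the owner (0o600), so there is
        # no window between creation and a later chmod where others could read
        # the private key material.
        fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd, "w") as handle:
            handle.write(data)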
+- :feature:`1509` (via :issue:`1868`, :issue:`1837`) Add support for OpenSSH's + Windows agent as a fallback when Putty/WinPageant isn't available or + functional. Reported by ``@benj56`` with patches/PRs from ``@lewgordon`` and + Patrick Spendrin. +- :bug:`892 major` Significantly speed up low-level read/write actions on + `~paramiko.sftp_file.SFTPFile` objects by using `bytearray`/`memoryview`. + This is unlikely to change anything for users of the higher level methods + like `SFTPClient.get ` or + `SFTPClient.getfo `, but users of + `SFTPClient.open ` will likely see + orders of magnitude improvements for files larger than a few megabytes in + size. + + Thanks to ``@jkji`` for the original report and to Sevastian Tchernov for the + patch. +- :support:`1985` Add ``six`` explicitly to install-requires; it snuck into + active use at some point but has only been indicated by transitive dependency + on ``bcrypt`` until they somewhat-recently dropped it. This will be + short-lived until we `drop Python 2 + support `_. Thanks to Sondre + Lillebø Gundersen for catch & patch. +- :release:`2.9.2 <2022-01-08>` +- :bug:`-` Connecting to servers which support ``server-sig-algs`` but which + have no overlap between that list and what a Paramiko client supports, now + raise an exception instead of defaulting to ``rsa-sha2-512`` (since the use + of ``server-sig-algs`` allows us to know what the server supports). +- :bug:`-` Enhanced log output when connecting to servers that do not support + ``server-sig-algs`` extensions, making the new-as-of-2.9 defaulting to SHA2 + pubkey algorithms more obvious when it kicks in. +- :release:`2.9.1 <2021-12-24>` +- :bug:`1955` Server-side support for ``rsa-sha2-256`` and ``ssh-rsa`` wasn't + fully operable after 2.9.0's release (signatures for RSA pubkeys were always + run through ``rsa-sha2-512`` instead). Report and early stab at a fix + courtesy of Jun Omae. +- :release:`2.9.0 <2021-12-23>` +- :feature:`1643` (also :issue:`1925`, :issue:`1644`, :issue:`1326`) Add + support for SHA-2 variants of RSA key verification algorithms (as described + in :rfc:`8332`) as well as limited SSH extension negotiation (:rfc:`8308`). + + .. warning:: + This change is slightly backwards incompatible, insofar as action is + required if your target systems do not support either RSA2 or the + ``server-sig-algs`` protocol extension. + + Specifically, you need to specify ``disabled_algorithms={'keys': + ['rsa-sha2-256', 'rsa-sha2-512']}`` in either `SSHClient + ` or `Transport + `. See below for details on why. + + How SSH servers/clients decide when and how to use this functionality can be + complicated; Paramiko's support is as follows: + + - Client verification of server host key during key exchange will now prefer + ``rsa-sha2-512``, ``rsa-sha2-256``, and legacy ``ssh-rsa`` algorithms, in + that order, instead of just ``ssh-rsa``. + + - Note that the preference order of other algorithm families such as + ``ed25519`` and ``ecdsa`` has not changed; for example, those two + groups are still preferred over RSA. + + - Server mode will now offer all 3 RSA algorithms for host key verification + during key exchange, similar to client mode, if it has been configured with + an RSA host key. + - Client mode key exchange now sends the ``ext-info-c`` flag signaling + support for ``MSG_EXT_INFO``, and support for parsing the latter + (specifically, its ``server-sig-algs`` flag) has been added. 
+  - Client mode, when performing public key authentication with an RSA key or
+    cert, will act as follows:
+
+    - In all cases, the list of algorithms to consider is based on the new
+      ``preferred_pubkeys`` list (see below) and ``disabled_algorithms``
+      (specifically, its ``pubkeys`` key); this list, like with host keys,
+      prefers SHA2-512, SHA2-256 and SHA1, in that order.
+    - When the server does not send ``server-sig-algs``, Paramiko will attempt
+      the first algorithm in the above list. Clients connecting to legacy
+      servers should thus use ``disabled_algorithms`` to turn off SHA2.
+    - When the server does send ``server-sig-algs``, the first algorithm
+      supported by both ends is used, or if there is none, it falls back to the
+      previous behavior.
+
+  - SSH agent support grew the ability to specify algorithm flags when
+    requesting private key signatures; this is now used to forward SHA2
+    algorithms when appropriate.
+  - Server mode is now capable of pubkey auth involving SHA-2 signatures from
+    clients, provided one's server implementation actually provides for doing
+    so.
+
+    - This includes basic support for sending ``MSG_EXT_INFO`` (containing
+      ``server-sig-algs`` only) to clients advertising ``ext-info-c`` in their
+      key exchange list.
+
+  In order to implement the above, the following API additions were made:
+
+  - `PKey.sign_ssh_data `: Grew an extra, optional
+    ``algorithm`` keyword argument (defaulting to ``None`` for most subclasses,
+    and to ``"ssh-rsa"`` for `~paramiko.rsakey.RSAKey`).
+  - A new `~paramiko.ssh_exception.SSHException` subclass was added,
+    `~paramiko.ssh_exception.IncompatiblePeer`, and is raised in all spots
+    where key exchange aborts due to algorithmic incompatibility.
+
+    - Like all other exceptions in that module, it inherits from
+      ``SSHException``, and as we did not change anything else about the
+      raising (i.e. the attributes and message text are the same) this change
+      is backwards compatible.
+
+  - `~paramiko.transport.Transport` grew a ``_preferred_pubkeys`` attribute and
+    matching ``preferred_pubkeys`` property to match the other, kex-focused,
+    such members. This allows client pubkey authentication to honor the
+    ``disabled_algorithms`` feature.
+
+  Thanks to Krisztián Kovács for the report and an early stab at a patch, as
+  well as the numerous users who submitted feedback on the issue, including but
+  not limited to: Christopher Rabotin, Sam Bull, and Manfred Kaiser.
+
+- :release:`2.8.1 <2021-11-28>`
+- :bug:`985` (via :issue:`992`) Fix listdir failure when server uses a locale.
+  Now on Python 2.7 `SFTPAttributes ` will
+  decode abbreviated month names correctly rather than raise
+  ``UnicodeDecodeError``. Patch courtesy of Martin Packman.
+- :bug:`1024` Deleting items from `~paramiko.hostkeys.HostKeys` would
+  incorrectly raise `KeyError` even for valid keys, due to a logic bug. This
+  has been fixed. Report & patch credit: Jia Zhang.
+- :bug:`1257` (also :issue:`1266`) Update RSA and ECDSA key decoding
+  subroutines to correctly catch exception types thrown by modern
+  versions of Cryptography (specifically ``TypeError`` and
+  its internal ``UnsupportedAlgorithm``). These exception classes will now
+  become `~paramiko.ssh_exception.SSHException` instances instead of bubbling
+  up. Thanks to Ignat Semenov for the report and ``@tylergarcianet`` for an
+  early patch.
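For the ``disabled_algorithms`` behavior discussed in the 2.9.0 notes above, a minimal client-side sketch (hostname, username, and key path are hypothetical) of turning off SHA-2 RSA signatures when talking to a legacy server that lacks ``server-sig-algs``::

    import paramiko

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    # Entries listed here are removed from Paramiko's preference lists; the
    # "pubkeys" key corresponds to the preferred_pubkeys list mentioned above.
    client.connect(
        "legacy.example.com",
        username="deploy",
        key_filename="/home/deploy/.ssh/id_rsa",
        disabled_algorithms={"pubkeys": ["rsa-sha2-256", "rsa-sha2-512"]},
    )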
+- :bug:`-` (also :issue:`908`) Update `~paramiko.pkey.PKey` and subclasses to + compare (``__eq__``) via direct field/attribute comparison instead of hashing + (while retaining the existing behavior of ``__hash__`` via a slight + refactor). Big thanks to Josh Snyder and Jun Omae for the reports, and to + Josh Snyder for reproduction details & patch. + + .. warning:: + This fixes a security flaw! If you are running Paramiko on 32-bit systems + with low entropy (such as any 32-bit Python 2, or a 32-bit Python 3 which + is running with ``PYTHONHASHSEED=0``) it is possible for an attacker to + craft a new keypair from an exfiltrated public key, which Paramiko would + consider equal to the original key. + + This could enable attacks such as, but not limited to, the following: + + - Paramiko server processes would incorrectly authenticate the attacker + (using their generated private key) as if they were the victim. We see + this as the most plausible attack using this flaw. + - Paramiko client processes would incorrectly validate a connected server + (when host key verification is enabled) while subjected + to a man-in-the-middle attack. This impacts more users than the + server-side version, but also carries higher requirements for the + attacker, namely successful DNS poisoning or other MITM techniques. + +- :release:`2.8.0 <2021-10-09>` +- :support:`-` Administrivia overhaul, including but not limited to: + + - Migrate CI to CircleCI + - Primary dev branch is now ``main`` (renamed) + - Many README edits for clarity, modernization etc; including a bunch more + (and consistent) status badges & unification with main project site index + - PyPI page much more fleshed out (long_description is now filled in with the + README; sidebar links expanded; etc) + - flake8, pytest configs split out of setup.cfg into their own files + - Invoke/invocations (used by maintainers/contributors) upgraded to modern + versions + +- :bug:`1462 major` (via :issue:`1882`) Newer server-side key exchange + algorithms not intended to use SHA1 (``diffie-hellman-group14-sha256``, + ``diffie-hellman-group16-sha512``) were incorrectly using SHA1 after all, due + to a bug causing them to ignore the ``hash_algo`` class attribute. This has + been corrected. Big thanks to ``@miverson`` for the report and to Benno Rice + for the patch. +- :feature:`1846` Add a ``prefetch`` keyword argument to `SFTPClient.get `/`SFTPClient.getfo ` + so users who need to skip SFTP prefetching are able to conditionally turn it + off. Thanks to Github user ``@h3ll0r`` for the PR. +- :release:`2.7.2 <2020-08-30>` +- :support:`- backported` Update our CI to catch issues with sdist generation, + installation and testing. +- :support:`1727 backported` Add missing test suite fixtures directory to + MANIFEST.in, reinstating the ability to run Paramiko's tests from an sdist + tarball. Thanks to Sandro Tosi for reporting the issue and to Blazej Michalik + for the PR. +- :support:`1722 backported` Remove leading whitespace from OpenSSH RSA test + suite static key fixture, to conform better to spec. Credit: Alex Gaynor. +- :bug:`-` Fix incorrect string formatting causing unhelpful error message + annotation when using Kerberos/GSSAPI. (Thanks, newer version of flake8!) +- :bug:`1723` Fix incorrectly swapped order of ``p`` and ``q`` numbers when + loading OpenSSH-format RSA private keys. 
At minimum this should address a + slowdown when using such keys, and it also means Paramiko works with + Cryptography 3.1 and above (which complains strenuously when this problem + appears). Thanks to Alex Gaynor for the patch. +- :release:`2.7.1 <2019-12-09>` +- :bug:`1567` The new-style private key format (added in 2.7) suffered from an + unpadding bug which had been fixed earlier for Ed25519 (as that key type has + always used the newer format). That fix has been refactored and applied to + the base key class, courtesy of Pierce Lopez. +- :bug:`1565` (via :issue:`1566`) Fix a bug in support for ECDSA keys under the + newly supported OpenSSH key format. Thanks to Pierce Lopez for the patch. +- :release:`2.7.0 <2019-12-03>` +- :feature:`602` (via :issue:`1343`, :issue:`1313`, :issue:`618`) Implement + support for OpenSSH 6.5-style private key files (typically denoted as having + ``BEGIN OPENSSH PRIVATE KEY`` headers instead of PEM format's ``BEGIN RSA + PRIVATE KEY`` or similar). If you were getting any sort of weird auth error + from "modern" keys generated on newer operating system releases (such as + macOS Mojave), this is the first update to try. + + Major thanks to everyone who contributed or tested versions of the patch, + including but not limited to: Kevin Abel, Michiel Tiller, Pierce Lopez, and + Jared Hobbs. +- :bug:`- major` ``ssh_config`` :ref:`token expansion ` used a + different method of determining the local username (``$USER`` env var), + compared to what the (much older) client connection code does + (``getpass.getuser``, which includes ``$USER`` but may check other variables + first, and is generally much more comprehensive). Both modules now use + ``getpass.getuser``. +- :feature:`-` A couple of outright `~paramiko.config.SSHConfig` parse errors + were previously represented as vanilla ``Exception`` instances; as part of + recent feature work a more specific exception class, + `~paramiko.ssh_exception.ConfigParseError`, has been created. It is now also + used in those older spots, which is naturally backwards compatible. +- :feature:`717` Implement support for the ``Match`` keyword in ``ssh_config`` + files. Previously, this keyword was simply ignored & keywords inside such + blocks were treated as if they were part of the previous block. Thanks to + Michael Leinartas for the initial patchset. + + .. note:: + This feature adds a new :doc:`optional install dependency `, + `Invoke `_, for managing ``Match exec`` + subprocesses. + +- :support:`-` Additional :doc:`installation ` ``extras_require`` + "flavors" (``ed25519``, ``invoke``, and ``all``) have been added to + our packaging metadata; see the install docs for details. +- :bug:`- major` Paramiko's use of ``subprocess`` for ``ProxyCommand`` support + is conditionally imported to prevent issues on limited interpreter platforms + like Google Compute Engine. However, any resulting ``ImportError`` was lost + instead of preserved for raising (in the rare cases where a user tried + leveraging ``ProxyCommand`` in such an environment). This has been fixed. +- :bug:`- major` Perform deduplication of ``IdentityFile`` contents during + ``ssh_config`` parsing; previously, if your config would result in the same + value being encountered more than once, ``IdentityFile`` would contain that + many copies of the same string. 
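A minimal sketch of the parse-then-lookup flow that the configuration docs and the 2.7.0 ``ssh_config`` entries above describe; the config path and hostname are illustrative::

    import os
    import paramiko

    config = paramiko.SSHConfig.from_path(os.path.expanduser("~/.ssh/config"))
    info = config.lookup("dev.example.com")  # result keys come back lowercased

    hostname = info.get("hostname", "dev.example.com")
    username = info.get("user")
    sock = None
    if "proxycommand" in info:
        # Honor a ProxyCommand directive by handing the client a proxy sock.
        sock = paramiko.ProxyCommand(info["proxycommand"])

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.connect(hostname, username=username, sock=sock)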
+- :feature:`897` Implement most 'canonical hostname' ``ssh_config`` + functionality (``CanonicalizeHostname``, ``CanonicalDomains``, + ``CanonicalizeFallbackLocal``, and ``CanonicalizeMaxDots``; + ``CanonicalizePermittedCNAMEs`` has **not** yet been implemented). All were + previously silently ignored. Reported by Michael Leinartas. +- :support:`-` Explicitly document :ref:`which ssh_config features we + currently support `. Previously users just had to guess, + which is simply no good. +- :feature:`-` Add new convenience classmethod constructors to + `~paramiko.config.SSHConfig`: `~paramiko.config.SSHConfig.from_text`, + `~paramiko.config.SSHConfig.from_file`, and + `~paramiko.config.SSHConfig.from_path`. No more annoying two-step process! +- :release:`2.6.0 <2019-06-23>` +- :feature:`1463` Add a new keyword argument to `SSHClient.connect + ` and `~paramiko.transport.Transport`, + ``disabled_algorithms``, which allows selectively disabling one or more + kex/key/cipher/etc algorithms. This can be useful when disabling algorithms + your target server (or client) does not support cleanly, or to work around + unpatched bugs in Paramiko's own implementation thereof. +- :release:`2.5.1 <2019-06-23>` +- :release:`2.4.3 <2019-06-23>` +- :bug:`1306` (via :issue:`1400`) Fix Ed25519 key handling so certain key + comment lengths don't cause ``SSHException("Invalid key")`` (this was + technically a bug in how padding, or lack thereof, is + calculated/interpreted). Thanks to ``@parke`` for the bug report & Pierce + Lopez for the patch. +- :support:`1440` (with initial fixes via :issue:`1460`) Tweak many exception + classes so their string representations are more human-friendly; this also + includes incidental changes to some ``super()`` calls. + + The definitions of exceptions' ``__init__`` methods have *not* changed, nor + have any log messages been altered, so this should be backwards compatible + for everything except the actual exceptions' ``__str__()`` outputs. + + Thanks to Fabian Büchler for original report & Pierce Lopez for the + foundational patch. +- :support:`1311` (for :issue:`584`, replacing :issue:`1166`) Add + backwards-compatible support for the ``gssapi`` GSSAPI library, as the + previous backend (``python-gssapi``) has since become defunct. This change + also includes tests for the GSSAPI functionality. + + Big thanks to Anselm Kruis for the patch and to Sebastian Deiß (author of our + initial GSSAPI functionality) for review. + + .. note:: + This feature also adds ``setup.py`` 'extras' support for installing + Paramiko as ``paramiko[gssapi]``, which pulls in the optional + dependencies you had to get by hand previously. + + .. note:: + To be very clear, this patch **does not** remove support for the older + ``python-gssapi`` library. We *may* remove that support in a later release, + but for now, either library will work. Please upgrade to ``gssapi`` when + you can, however, as ``python-gssapi`` is no longer maintained upstream. + +- :bug:`322 major` `SSHClient.exec_command + ` previously returned a naive + `~paramiko.channel.ChannelFile` object for its ``stdin`` value; such objects + don't know to properly shut down the remote end's stdin when they + ``.close()``. This lead to issues (such as hangs) when running remote + commands that read from stdin. + + A new subclass, `~paramiko.channel.ChannelStdinFile`, has been created which + closes remote stdin when it itself is closed. 
+ `~paramiko.client.SSHClient.exec_command` has been updated to use that class + for its ``stdin`` return value. + + Thanks to Brandon Rhodes for the report & steps to reproduce. +- :release:`2.5.0 <2019-06-09>` +- :feature:`1233` (also :issue:`1229`, :issue:`1332`) Add support for + encrypt-then-MAC (ETM) schemes (``hmac-sha2-256-etm@openssh.com``, + ``hmac-sha2-512-etm@openssh.com``) and two newer Diffie-Hellman group key + exchange algorithms (``group14``, using SHA256; and ``group16``, using + SHA512). Patch courtesy of Edgar Sousa. +- :feature:`532` (via :issue:`1384` and :issue:`1258`) Add support for + Curve25519 key exchange (aka ``curve25519-sha256@libssh.org``). Thanks to + Alex Gaynor and Dan Fuhry for supplying patches. +- :support:`1379` (also :issue:`1369`) Raise Cryptography dependency + requirement to version 2.5 (from 1.5) and update some deprecated uses of its + API. + + This removes a bunch of warnings of the style + ``CryptographyDeprecationWarning: encode_point has been deprecated on + EllipticCurvePublicNumbers and will be removed in a future version. Please + use EllipticCurvePublicKey.public_bytes to obtain both compressed and + uncompressed point encoding`` and similar, which users who had eventually + upgraded to Cryptography 2.x would encounter. + + .. warning:: + This change is backwards incompatible **if** you are unable to upgrade your + version of Cryptography. Please see `Cryptography's own changelog + `_ for details on what may + change when you upgrade; for the most part the only changes involved + dropping older Python versions (such as 2.6, 3.3, or some PyPy editions) + which Paramiko itself has already dropped. + +- :support:`1378 backported` Add support for the modern (as of Python 3.3) + import location of ``MutableMapping`` (used in host key management) to avoid + the old location becoming deprecated in Python 3.8. Thanks to Josh Karpel for + catch & patch. +- :release:`2.4.2 <2018-09-18>` +- :release:`2.3.3 <2018-09-18>` +- :release:`2.2.4 <2018-09-18>` +- :release:`2.1.6 <2018-09-18>` +- :release:`2.0.9 <2018-09-18>` +- :bug:`-` Modify protocol message handling such that ``Transport`` does not + respond to ``MSG_UNIMPLEMENTED`` with its own ``MSG_UNIMPLEMENTED``. This + behavior probably didn't cause any outright errors, but it doesn't seem to + conform to the RFCs and could cause (non-infinite) feedback loops in some + scenarios (usually those involving Paramiko on both ends). +- :bug:`1283` Fix exploit (CVE-2018-1000805) in Paramiko's server mode (**not** + client mode) where hostile clients could trick the server into thinking they + were authenticated without actually submitting valid authentication. + + Specifically, steps have been taken to start separating client and server + related message types in the message handling tables within ``Transport`` and + ``AuthHandler``; this work is not complete but enough has been performed to + close off this particular exploit (which was the only obvious such exploit + for this particular channel). + + Thanks to Daniel Hoffman for the detailed report. +- :support:`1292 backported (<2.4)` Backport changes from :issue:`979` (added + in Paramiko + 2.3) to Paramiko 2.0-2.2, using duck-typing to preserve backwards + compatibility. This allows these older versions to use newer Cryptography + sign/verify APIs when available, without requiring them (as is the case with + Paramiko 2.3+). 
+ + Practically speaking, this change prevents spamming of + ``CryptographyDeprecationWarning`` notices which pop up in the above scenario + (older Paramiko, newer Cryptography). + + .. note:: + This is a no-op for Paramiko 2.3+, which have required newer Cryptography + releases since they were released. + +- :support:`1291 backported (<2.4)` Backport pytest support and application of + the ``black`` code formatter (both of which previously only existed in the + 2.4 branch and above) to everything 2.0 and newer. This makes back/forward + porting bugfixes significantly easier. +- :support:`1262 backported` Add ``*.pub`` files to the MANIFEST so distributed + source packages contain some necessary test assets. Credit: Alexander + Kapshuna. +- :feature:`1212` Updated `SSHConfig.lookup ` + so it returns a new, type-casting-friendly dict subclass + (`~paramiko.config.SSHConfigDict`) in lieu of dict literals. This ought to be + backwards compatible, and allows an easier way to check boolean or int type + ``ssh_config`` values. Thanks to Chris Rose for the patch. +- :support:`1191` Update our install docs with (somewhat) recently added + additional dependencies; we previously only required Cryptography, but the + docs never got updated after we incurred ``bcrypt`` and ``pynacl`` + requirements for Ed25519 key support. + + Additionally, ``pyasn1`` was never actually hard-required; it was necessary + during a development branch, and is used by the optional GSSAPI support, but + is not required for regular installation. Thus, it has been removed from our + ``setup.py`` and its imports in the GSSAPI code made optional. + + Credit to ``@stevenwinfield`` for highlighting the outdated install docs. +- :release:`2.4.1 <2018-03-12>` +- :release:`2.3.2 <2018-03-12>` +- :release:`2.2.3 <2018-03-12>` +- :release:`2.1.5 <2018-03-12>` +- :release:`2.0.8 <2018-03-12>` +- :release:`1.18.5 <2018-03-12>` +- :release:`1.17.6 <2018-03-12>` +- :bug:`1175 (1.17+)` Fix a security flaw (CVE-2018-7750) in Paramiko's server + mode (emphasis on **server** mode; this does **not** impact *client* use!) + where authentication status was not checked before processing channel-open + and other requests typically only sent after authenticating. Big thanks to + Matthijs Kooijman for the report. +- :bug:`1168` Add newer key classes for Ed25519 and ECDSA to + ``paramiko.__all__`` so that code introspecting that attribute, or using + ``from paramiko import *`` (such as some IDEs) sees them. Thanks to + ``@patriksevallius`` for the patch. +- :bug:`1039` Ed25519 auth key decryption raised an unexpected exception when + given a unicode password string (typical in python 3). Report by Theodor van + Nahl and fix by Pierce Lopez. +- :release:`2.4.0 <2017-11-14>` +- :feature:`-` Add a new ``passphrase`` kwarg to `SSHClient.connect + ` so users may disambiguate key-decryption + passphrases from password-auth passwords. (This is a backwards compatible + change; ``password`` will still pull double duty as a passphrase when + ``passphrase`` is not given.) +- :support:`-` Update ``tearDown`` of client test suite to avoid hangs due to + eternally blocking ``accept()`` calls on the internal server thread (which + can occur when test code raises an exception before actually connecting to + the server.) +- :bug:`1108 (1.17+)` Rename a private method keyword argument (which was named + ``async``) so that we're compatible with the upcoming Python 3.7 release + (where ``async`` is a new keyword.) Thanks to ``@vEpiphyte`` for the report. 
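A short sketch of the ``passphrase`` keyword added in 2.4.0 above, which keeps key-decryption passphrases separate from password authentication; host, username, and key path are hypothetical::

    import paramiko

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.connect(
        "build.example.com",
        username="ci",
        key_filename="/home/ci/.ssh/id_ed25519",
        passphrase="unlocks-the-key",  # decrypts the private key only
    )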
+- :support:`1100` Updated the test suite & related docs/metadata/config to be + compatible with pytest instead of using the old, custom, crufty + unittest-based ``test.py``. + + This includes marking known-slow tests (mostly the SFTP ones) so they can be + filtered out by ``inv test``'s default behavior; as well as other minor + tweaks to test collection and/or display (for example, GSSAPI tests are + collected, but skipped, instead of not even being collected by default as in + ``test.py``.) +- :support:`- backported` Include LICENSE file in wheel archives. +- :support:`1070` Drop Python 2.6 and Python 3.3 support; now only 2.7 and 3.4+ + are supported. If you're unable to upgrade from 2.6 or 3.3, please stick to + the Paramiko 2.3.x (or below) release lines. +- :release:`2.3.1 <2017-09-22>` +- :bug:`1071` Certificate support broke the no-certificate case for Ed25519 + keys (symptom is an ``AttributeError`` about ``public_blob``.) This went + uncaught due to cert autoload behavior (i.e. our test suite never actually + ran the no-cert case, because the cert existed!) Both issues have been fixed. + Thanks to John Hu for the report. +- :release:`2.3.0 <2017-09-18>` +- :release:`2.2.2 <2017-09-18>` +- :release:`2.1.4 <2017-09-18>` +- :release:`2.0.7 <2017-09-18>` +- :release:`1.18.4 <2017-09-18>` +- :bug:`1065` Add rekeying support to GSSAPI connections, which was erroneously + missing. Without this fix, any attempt to renegotiate the transport keys for + a ``gss-kex``-authed `~paramiko.transport.Transport` would cause a MIC + failure and terminate the connection. Thanks to Sebastian Deiß and Anselm + Kruis for the patch. +- :feature:`1063` Add a ``gss_trust_dns`` option to ``Client`` and + ``Transport`` to allow explicitly setting whether or not DNS canonicalization + should occur when using GSSAPI. Thanks to Richard E. Silverman for the report + & Sebastian Deiß for initial patchset. +- :bug:`1061` Clean up GSSAPI authentication procedures so they do not prevent + normal fallback to other authentication methods on failure. (In other words, + presence of GSSAPI functionality on a target server precluded use of _any_ + other auth type if the user was unable to pass GSSAPI auth.) Patch via Anselm + Kruis. +- :bug:`1060` Fix key exchange (kex) algorithm list for GSSAPI authentication; + previously, the list used solely out-of-date algorithms, and now contains + newer ones listed preferentially before the old. Credit: Anselm Kruis. +- :bug:`1055 (1.17+)` (also :issue:`1056`, :issue:`1057`, :issue:`1058`, + :issue:`1059`) Fix up host-key checking in our GSSAPI support, which was + previously using an incorrect API call. Thanks to Anselm Kruis for the + patches. +- :bug:`945 (1.18+)` (backport of :issue:`910` and re: :issue:`865`) SSHClient + now requests the type of host key it has (e.g. from known_hosts) and does not + consider a different type to be a "Missing" host key. This fixes a common + case where an ECDSA key is in known_hosts and the server also has an RSA host + key. Thanks to Pierce Lopez. +- :support:`979` Update how we use `Cryptography `_'s + signature/verification methods so we aren't relying on a deprecated API. + Thanks to Paul Kehrer for the patch. + + .. warning:: + This bumps the minimum Cryptography version from 1.1 to 1.5. Such an + upgrade should be backwards compatible and easy to do. See `their changelog + `_ for additional details. +- :support:`-` Ed25519 keys never got proper API documentation support; this + has been fixed. 
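Roughly how the ``gss_trust_dns`` option from the entry above might be used on the client side, assuming a GSSAPI-enabled environment; hostname and username are placeholders::

    import paramiko

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.connect(
        "kerberized.example.com",
        username="alice",
        gss_auth=True,         # authenticate via GSSAPI
        gss_kex=True,          # use GSSAPI-based key exchange
        gss_trust_dns=False,   # skip DNS canonicalization of the target name
    )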
+- :feature:`1026` Update `~paramiko.ed25519key.Ed25519Key` so its constructor + offers the same ``file_obj`` parameter as its sibling key classes. Credit: + Michal Kuffa. +- :feature:`1013` Added pre-authentication banner support for the server + interface (`ServerInterface.get_banner + ` plus related support in + ``Transport/AuthHandler``.) Patch courtesy of Dennis Kaarsemaker. +- :bug:`60 major` (via :issue:`1037`) Paramiko originally defaulted to zlib + compression level 9 (when one connects with ``compression=True``; it defaults + to off.) This has been found to be quite wasteful and tends to cause much + longer transfers in most cases, than is necessary. + + OpenSSH defaults to compression level 6, which is a much more reasonable + setting (nearly identical compression characteristics but noticeably, + sometimes significantly, faster transmission); Paramiko now uses this value + instead. + + Thanks to Damien Dubé for the report and ``@DrNeutron`` for investigating & + submitting the patch. +- :support:`-` Display exception type and message when logging auth-rejection + messages (ones reading ``Auth rejected: unsupported or mangled public key``); + previously this error case had a bare except and did not display exactly why + the key failed. It will now append info such as ``KeyError: + 'some-unknown-type-string'`` or similar. +- :feature:`1042` (also partially :issue:`531`) Implement basic client-side + certificate authentication (as per the OpenSSH vendor extension.) + + The core implementation is `PKey.load_certificate + ` and its corresponding ``.public_blob`` + attribute on key objects, which is honored in the auth and transport modules. + Additionally, `SSHClient.connect ` will + now automatically load certificate data alongside private key data when one + has appropriately-named cert files (e.g. ``id_rsa-cert.pub``) - see its + docstring for details. + + Thanks to Jason Rigby for a first draft (:issue:`531`) and to Paul Kapp for + the second draft, upon which the current functionality has been based (with + modifications.) + + .. note:: + This support is client-focused; Paramiko-driven server code is capable of + handling cert-bearing pubkey auth packets, *but* it does not interpret any + cert-specific fields, so the end result is functionally identical to a + vanilla pubkey auth process (and thus requires e.g. prepopulated + authorized-keys data.) We expect full server-side cert support to follow + later. + +- :support:`1041` Modify logic around explicit disconnect + messages, and unknown-channel situations, so that they rely on centralized + shutdown code instead of running their own. This is at worst removing some + unnecessary code, and may help with some situations where Paramiko hangs at + the end of a session. Thanks to Paul Kapp for the patch. +- :support:`1012` (via :issue:`1016`) Enhance documentation around the new + `SFTP.posix_rename ` method so + it's referenced in the 'standard' ``rename`` method for increased visibility. + Thanks to Marius Flage for the report. +- :release:`2.2.1 <2017-06-13>` +- :bug:`993` Ed25519 host keys were not comparable/hashable, causing an + exception if such a key existed in a ``known_hosts`` file. Thanks to Oleh + Prypin for the report and Pierce Lopez for the fix. +- :bug:`990` The (added in 2.2.0) ``bcrypt`` dependency should have been on + version 3.1.3 or greater (was initially set to 3.0.0 or greater.) Thanks to + Paul Howarth for the report. 
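A compact sketch of the client-side certificate authentication flow described above; file locations and the target host are illustrative::

    import paramiko

    # Attach the certificate to the key; it is exposed as key.public_blob and
    # presented to the server during publickey auth.
    key = paramiko.RSAKey.from_private_key_file("/home/alice/.ssh/id_rsa")
    key.load_certificate("/home/alice/.ssh/id_rsa-cert.pub")

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.connect("bastion.example.com", username="alice", pkey=key)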
+- :release:`2.2.0 <2017-06-09>` +- :release:`2.1.3 <2017-06-09>` +- :release:`2.0.6 <2017-06-09>` +- :release:`1.18.3 <2017-06-09>` +- :release:`1.17.5 <2017-06-09>` +- :bug:`865` SSHClient now requests the type of host key it has (e.g. from + known_hosts) and does not consider a different type to be a "Missing" host + key. This fixes a common case where an ECDSA key is in known_hosts and the + server also has an RSA host key. Thanks to Pierce Lopez. +- :support:`906 (1.18+)` Clean up a handful of outdated imports and related + tweaks. Thanks to Pierce Lopez. +- :bug:`984` Enhance default cipher preference order such that + ``aes(192|256)-cbc`` are preferred over ``blowfish-cbc``. Thanks to Alex + Gaynor. +- :bug:`971 (1.17+)` Allow any type implementing the buffer API to be used with + `BufferedFile `, `Channel + `, and `SFTPFile `. + This resolves a regression introduced in 1.13 with the Python 3 porting + changes, when using types such as ``memoryview``. Credit: Martin Packman. +- :bug:`741` (also :issue:`809`, :issue:`772`; all via :issue:`912`) Writing + encrypted/password-protected private key files was silently broken since 2.0 + due to an incorrect API call; this has been fixed. + + Includes a directly related fix, namely adding the ability to read + ``AES-256-CBC`` ciphered private keys (which is now what we tend to write out + as it is Cryptography's default private key cipher.) + + Thanks to ``@virlos`` for the original report, Chris Harris and ``@ibuler`` + for initial draft PRs, and ``@jhgorrell`` for the final patch. +- :feature:`65` (via :issue:`471`) Add support for OpenSSH's SFTP + ``posix-rename`` protocol extension (section 3.3 of `OpenSSH's protocol + extension document + `_), + via a new ``posix_rename`` method in `SFTPClient + ` and `SFTPServerInterface + `. Thanks to Wren Turkal + for the initial patch & Mika Pflüger for the enhanced, merged PR. +- :feature:`869` Add an ``auth_timeout`` kwarg to `SSHClient.connect + ` (default: 30s) to avoid hangs when the + remote end becomes unresponsive during the authentication step. Credit to + ``@timsavage``. + + .. note:: + This technically changes behavior, insofar as very slow auth steps >30s + will now cause timeout exceptions instead of completing. We doubt most + users will notice; those affected can simply give a higher value to + ``auth_timeout``. + +- :support:`921` Tighten up the ``__hash__`` implementation for various key + classes; less code is good code. Thanks to Francisco Couzo for the patch. +- :support:`956 backported (1.17+)` Switch code coverage service from + coveralls.io to codecov.io (& then disable the latter's auto-comments.) + Thanks to Nikolai Røed Kristiansen for the patch. +- :bug:`983` Move ``sha1`` above the now-arguably-broken ``md5`` in the list of + preferred MAC algorithms, as an incremental security improvement for users + whose target systems offer both. Credit: Pierce Lopez. +- :bug:`667` The RC4/arcfour family of ciphers has been broken since version + 2.0; but since the algorithm is now known to be completely insecure, we are + opting to remove support outright instead of fixing it. Thanks to Alex Gaynor + for catch & patch. +- :feature:`857` Allow `SSHClient.set_missing_host_key_policy + ` to accept policy + classes _or_ instances, instead of only instances, thus fixing a + long-standing gotcha for unaware users. +- :feature:`951` Add support for ECDH key exchange (kex), specifically the + algorithms ``ecdh-sha2-nistp256``, ``ecdh-sha2-nistp384``, and + ``ecdh-sha2-nistp521``. 
They now come before the older ``diffie-hellman-*`` + family of kex algorithms in the preferred-kex list. Thanks to Shashank + Veerapaneni for the patch & Pierce Lopez for a follow-up. +- :support:`- backported` A big formatting pass to clean up an enormous number + of invalid Sphinx reference links, discovered by switching to a modern, + rigorous nitpicking doc-building mode. +- :bug:`900` (via :issue:`911`) Prefer newer ``ecdsa-sha2-nistp`` keys over RSA + and DSA keys during host key selection. This improves compatibility with + OpenSSH, both in terms of general behavior, and also re: ability to properly + leverage OpenSSH-modified ``known_hosts`` files. Credit: ``@kasdoe`` for + original report/PR and Pierce Lopez for the second draft. +- :bug:`794` (via :issue:`981`) Prior support for ``ecdsa-sha2-nistp(384|521)`` + algorithms didn't fully extend to covering host keys, preventing connection + to hosts which only offer these key types and no others. This is now fixed. + Thanks to ``@ncoult`` and ``@kasdoe`` for reports and Pierce Lopez for the + patch. +- :feature:`325` (via :issue:`972`) Add Ed25519 support, for both host keys + and user authentication. Big thanks to Alex Gaynor for the patch. + + .. note:: + This change adds the ``bcrypt`` and ``pynacl`` Python libraries as + dependencies. No C-level dependencies beyond those previously required (for + Cryptography) have been added. + +- :support:`974 backported` Overhaul the codebase to be PEP-8, etc, compliant + (i.e. passes the maintainer's preferred `flake8 `_ + configuration) and add a ``flake8`` step to the Travis config. Big thanks to + Dorian Pula! +- :bug:`949 (1.17+)` SSHClient and Transport could cause a memory leak if + there's a connection problem or protocol error, even if ``Transport.close()`` + is called. Thanks Kyle Agronick for the discovery and investigation, and + Pierce Lopez for assistance. +- :bug:`683 (1.17+)` Make ``util.log_to_file`` append instead of replace. + Thanks to ``@vlcinsky`` for the report. +- :release:`2.1.2 <2017-02-20>` +- :release:`2.0.5 <2017-02-20>` +- :release:`1.18.2 <2017-02-20>` +- :release:`1.17.4 <2017-02-20>` +- :bug:`853 (1.17+)` Tweak how `RSAKey.__str__ ` + behaves so it doesn't cause ``TypeError`` under Python 3. Thanks to Francisco + Couzo for the report. +- :bug:`862 (1.17+)` (via :issue:`863`) Avoid test suite exceptions on + platforms lacking ``errno.ETIME`` (which seems to be some FreeBSD and some + Windows environments.) Thanks to Sofian Brabez. +- :bug:`44 (1.17+)` (via :issue:`891`) `SSHClient ` + now gives its internal `Transport ` a handle on + itself, preventing garbage collection of the client until the session is + closed. Without this, some code which returns stream or transport objects + without the client that generated them, would result in premature session + closure when the client was GCd. Credit: ``@w31rd0`` for original report, + Omer Anson for the patch. +- :bug:`713 (<2.0)` (via :issue:`714` and :issue:`889`) Don't pass + initialization vectors to PyCrypto when dealing with counter-mode ciphers; + newer PyCrypto versions throw an exception otherwise (older ones simply + ignored this parameter altogether). Thanks to ``@jmh045000`` for report & + patches. +- :bug:`895 (1.17+)` Fix a bug in server-mode concerning multiple interactive + auth steps (which were incorrectly responded to). Thanks to Dennis + Kaarsemaker for catch & patch. 
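Given the ``util.log_to_file`` change above (the helper now appends instead of truncating), one call is enough to capture protocol-level logs across runs; the path is illustrative::

    import logging
    import paramiko

    # Subsequent runs append to the same file; level defaults to DEBUG.
    paramiko.util.log_to_file("/tmp/paramiko.log", level=logging.INFO)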
+- :support:`866 backported (1.17+)` (also :issue:`838`) Remove an old + test-related file we don't support, and add PyPy to Travis-CI config. Thanks + to Pierce Lopez for the final patch and Pedro Rodrigues for an earlier + edition. +- :release:`2.1.1 <2016-12-12>` +- :release:`2.0.4 <2016-12-12>` +- :release:`1.18.1 <2016-12-12>` +- :bug:`859 (1.18+)` (via :issue:`860`) A tweak to the original patch + implementing :issue:`398` was not fully applied, causing calls to + `~paramiko.client.SSHClient.invoke_shell` to fail with ``AttributeError``. + This has been fixed. Patch credit: Kirk Byers. +- :bug:`-` Accidentally merged the new features from 1.18.0 into the + 2.0.x bugfix-only branch. This included merging a bug in one of those new + features (breaking `~paramiko.client.SSHClient.invoke_shell` with an + ``AttributeError``.) The offending code has been stripped out of the 2.0.x + line (but of course, remains in 2.1.x and above.) +- :bug:`859` (via :issue:`860`) A tweak to the original patch implementing + :issue:`398` was not fully applied, causing calls to + `~paramiko.client.SSHClient.invoke_shell` to fail with ``AttributeError``. + This has been fixed. Patch credit: Kirk Byers. +- :release:`2.1.0 <2016-12-09>` +- :release:`2.0.3 <2016-12-09>` +- :release:`1.18.0 <2016-12-09>` +- :release:`1.17.3 <2016-12-09>` +- :bug:`802 (1.17+)` (via :issue:`804`) Update our vendored Windows API module + to address errors of the form ``AttributeError: 'module' object has no + attribute 'c_ssize_t'``. Credit to Jason R. Coombs. +- :bug:`824 (1.17+)` Fix the implementation of ``PKey.write_private_key_file`` + (this method is only publicly defined on subclasses; the fix was in the + private real implementation) so it passes the correct params to ``open()``. + This bug apparently went unnoticed and unfixed for 12 entire years. Congrats + to John Villalovos for noticing & submitting the patch! +- :support:`801 backported (1.17+)` Skip a Unix-only test when on Windows; + thanks to Gabi Davar. +- :support:`792 backported (1.17+)` Minor updates to the README and demos; + thanks to Alan Yee. +- :feature:`780 (1.18+)` (also :issue:`779`, and may help users affected by + :issue:`520`) Add an optional ``timeout`` parameter to + `Transport.start_client ` (and + feed it the value of the configured connection timeout when used within + `SSHClient `.) This helps prevent situations where + network connectivity isn't timing out, but the remote server is otherwise + unable to service the connection in a timely manner. Credit to + ``@sanseihappa``. +- :bug:`742` (also re: :issue:`559`) Catch ``AssertionError`` thrown by + Cryptography when attempting to load bad ECDSA keys, turning it into an + ``SSHException``. This moves the behavior in line with other "bad keys" + situations, re: Paramiko's main auth loop. Thanks to MengHuan Yu for the + patch. +- :bug:`789 (1.17+)` Add a missing ``.closed`` attribute (plus ``._closed`` + because reasons) to `ProxyCommand ` so the + earlier partial fix for :issue:`520` works in situations where one is + gatewaying via ``ProxyCommand``. +- :bug:`334 (1.17+)` Make the ``subprocess`` import in ``proxy.py`` lazy so + users on platforms without it (such as Google App Engine) can import Paramiko + successfully. (Relatedly, make it easier to tweak an active socket check + timeout [in `Transport `] which was previously + hardcoded.) Credit: Shinya Okano. 
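A short sketch of the ``Transport.start_client`` timeout added via :issue:`780`; the hostname is a placeholder, and ``SSHClient`` feeds its own configured connect timeout through automatically so direct use like this is only needed when driving ``Transport`` by hand::

    import socket

    import paramiko

    sock = socket.create_connection(("ssh.example.com", 22), timeout=10)
    transport = paramiko.Transport(sock)
    # Bounds the banner exchange / negotiation phase, not just the TCP connect.
    transport.start_client(timeout=15)
    print(transport.get_remote_server_key().get_name())
    transport.close()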
+- :support:`854 backported (1.17+)` Fix incorrect docstring/param-list for + `Transport.auth_gssapi_keyex + ` so it matches the real + signature. Caught by ``@Score_Under``. +- :bug:`681 (1.17+)` Fix a Python3-specific bug re: the handling of read + buffers when using ``ProxyCommand``. Thanks to Paul Kapp for catch & patch. +- :feature:`398 (1.18+)` Add an ``environment`` dict argument to + `Client.exec_command ` (plus the + lower level `Channel.update_environment + ` and + `Channel.set_environment_variable + ` methods) which + implements the ``env`` SSH message type. This means the remote shell + environment can be set without the use of ``VARNAME=value`` shell tricks, + provided the server's ``AcceptEnv`` lists the variables you need to set. + Thanks to Philip Lorenz for the pull request. +- :support:`819 backported (>=1.15,<2.0)` Document how lacking ``gmp`` headers + at install time can cause a significant performance hit if you build PyCrypto + from source. (Most system-distributed packages already have this enabled.) +- :release:`2.0.2 <2016-07-25>` +- :release:`1.17.2 <2016-07-25>` +- :release:`1.16.3 <2016-07-25>` +- :bug:`673 (1.16+)` (via :issue:`681`) Fix protocol banner read errors (``SSHException``) which would occasionally pop up when using ``ProxyCommand`` gatewaying. Thanks to ``@Depado`` for the initial report and Paul Kapp for the fix. -* :bug:`774 (1.16+)` Add a ``_closed`` private attribute to +- :bug:`774 (1.16+)` Add a ``_closed`` private attribute to `~paramiko.channel.Channel` objects so that they continue functioning when used as proxy sockets under Python 3 (e.g. as ``direct-tcpip`` gateways for other Paramiko connections.) -* :bug:`758 (1.16+)` Apply type definitions to ``_winapi`` module from +- :bug:`758 (1.16+)` Apply type definitions to ``_winapi`` module from `jaraco.windows `_ 3.6.1. This should address issues on Windows platforms that often result in errors like ``ArgumentError: [...] int too long to convert``. Thanks to ``@swohlerLL`` for the report and Jason R. Coombs for the patch. -* :release:`2.0.1 <2016-06-21>` -* :release:`1.17.1 <2016-06-21>` -* :release:`1.16.2 <2016-06-21>` -* :bug:`520 (1.16+)` (Partial fix) Fix at least one instance of race condition +- :release:`2.0.1 <2016-06-21>` +- :release:`1.17.1 <2016-06-21>` +- :release:`1.16.2 <2016-06-21>` +- :bug:`520 (1.16+)` (Partial fix) Fix at least one instance of race condition driven threading hangs at end of the Python interpreter session. (Includes a docs update as well - always make sure to ``.close()`` your clients!) -* :bug:`537 (1.16+)` Fix a bug in `BufferedPipe.set_event +- :bug:`537 (1.16+)` Fix a bug in `BufferedPipe.set_event ` which could cause deadlocks/hangs when one uses `select.select` against `~paramiko.channel.Channel` objects (or otherwise calls `Channel.fileno ` after the channel has closed). Thanks to Przemysław Strzelczak for the report & reproduction case, and to Krzysztof Rusek for the fix. -* :release:`2.0.0 <2016-04-28>` -* :release:`1.17.0 <2016-04-28>` -* :release:`1.16.1 <2016-04-28>` -* :release:`1.15.5 <2016-04-28>` -* :feature:`731` (working off the earlier :issue:`611`) Add support for 384- +- :release:`2.0.0 <2016-04-28>` +- :release:`1.17.0 <2016-04-28>` +- :release:`1.16.1 <2016-04-28>` +- :release:`1.15.5 <2016-04-28>` +- :feature:`731` (working off the earlier :issue:`611`) Add support for 384- and 512-bit elliptic curve groups in ECDSA key types (aka ``ecdsa-sha2-nistp384`` / ``ecdsa-sha2-nistp521``). 
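The :issue:`398` ``environment`` support in practice, assuming ``client`` is an already-connected ``SSHClient`` and the server's ``AcceptEnv`` permits the variable (otherwise the env request is refused or ignored); the variable name and value are placeholders::

    stdin, stdout, stderr = client.exec_command(
        "echo $LC_BUILD_TAG",
        environment={"LC_BUILD_TAG": "release-2.1"},
    )
    print(stdout.read().decode())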
Thanks to Michiel Tiller and ``@CrazyCasta`` for the patches. -* :bug:`670` Due to an earlier bugfix, less-specific ``Host`` blocks' +- :bug:`670` Due to an earlier bugfix, less-specific ``Host`` blocks' ``ProxyCommand`` values were overriding ``ProxyCommand none`` in more-specific ``Host`` blocks. This has been fixed in a backwards compatible manner (i.e. ``ProxyCommand none`` continues to appear as a total lack of any ``proxycommand`` key in parsed config structures). Thanks to Pat Brisbin for the catch. -* :bug:`676` (via :issue:`677`) Fix a backwards incompatibility issue that - cropped up in `SFTPFile.prefetch <~paramiko.sftp_file.prefetch>` re: the - erroneously non-optional ``file_size`` parameter. Should only affect users - who manually call ``prefetch``. Thanks to ``@stevevanhooser`` for catch & - patch. -* :feature:`394` Replace PyCrypto with the Python Cryptographic Authority +- :bug:`676` (via :issue:`677`) Fix a backwards incompatibility issue that + cropped up in `SFTPFile.prefetch ` re: + the erroneously non-optional ``file_size`` parameter. Should only affect + users who manually call ``prefetch``. Thanks to ``@stevevanhooser`` for catch + & patch. +- :feature:`394` Replace PyCrypto with the Python Cryptographic Authority (PyCA) 'Cryptography' library suite. This improves security, installability, and performance; adds PyPy support; and much more. @@ -72,88 +848,88 @@ Changelog simply ``pip install -U paramiko`` (especially if you **upgrade to pip 8**). -* :bug:`577` (via :issue:`578`; should also fix :issue:`718`, :issue:`560`) Fix +- :bug:`577` (via :issue:`578`; should also fix :issue:`718`, :issue:`560`) Fix stalled/hung SFTP downloads by cleaning up some threading lock issues. Thanks to Stephen C. Pope for the patch. -* :bug:`716` Fix a Python 3 compatibility issue when handling two-factor +- :bug:`716` Fix a Python 3 compatibility issue when handling two-factor authentication. Thanks to Mateusz Kowalski for the catch & original patch. -* :support:`729 backported (>=1.15,<2.0)` Clean up ``setup.py`` to always use +- :support:`729 backported (>=1.15,<2.0)` Clean up ``setup.py`` to always use ``setuptools``, not doing so was a historical artifact from bygone days. Thanks to Alex Gaynor. -* :bug:`649 major (==1.17)` Update the module in charge of handling SSH moduli +- :bug:`649 major (==1.17)` Update the module in charge of handling SSH moduli so it's consistent with OpenSSH behavior re: prime number selection. Thanks to Damien Tournoud for catch & patch. -* :bug:`617` (aka `fabric/fabric#1429 +- :bug:`617` (aka `fabric/fabric#1429 `_; via :issue:`679`; related: :issue:`678`, :issue:`685`, :issue:`615` & :issue:`616`) Fix up `~paramiko.ssh_exception.NoValidConnectionsError` so it pickles correctly, and fix a related Python 3 compatibility issue. Thanks to Rebecca Schlussel for the report & Marius Gedminas for the patch. -* :bug:`613` (via :issue:`619`) Update to ``jaraco.windows`` 3.4.1 to fix some +- :bug:`613` (via :issue:`619`) Update to ``jaraco.windows`` 3.4.1 to fix some errors related to ``ctypes`` on Windows platforms. Credit to Jason R. Coombs. -* :support:`621 backported (>=1.15,<2.0)` Annotate some public attributes on +- :support:`621 backported (>=1.15,<2.0)` Annotate some public attributes on `~paramiko.channel.Channel` such as ``.closed``. Thanks to Sergey Vasilyev for the report. 
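A sketch of the now-optional ``file_size`` behavior restored by :issue:`676`/:issue:`677`, assuming ``client`` is a connected ``SSHClient`` and the remote path is a placeholder::

    sftp = client.open_sftp()
    with sftp.open("/var/log/syslog", "rb") as remote_file:
        # file_size is optional again; when omitted, prefetch determines the
        # size itself before issuing read-ahead requests.
        remote_file.prefetch()
        data = remote_file.read()
    sftp.close()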
-* :bug:`632` Fix logic bug in the SFTP client's callback-calling functionality; +- :bug:`632` Fix logic bug in the SFTP client's callback-calling functionality; previously there was a chance the given callback would fire twice at the end of a transfer. Thanks to ``@ab9-er`` for catch & original patch. -* :support:`612 backported (>=1.15,<2.0)` Identify & work around a race +- :support:`612 backported (>=1.15,<2.0)` Identify & work around a race condition in the test for handshake timeouts, which was causing frequent test failures for a subset of contributors as well as Travis-CI (usually, but not always, limited to Python 3.5). Props to Ed Kellett for assistance during some of the troubleshooting. -* :support:`697 backported (>=1.15,<2.0)` Remove whitespace in our +- :support:`697 backported (>=1.15,<2.0)` Remove whitespace in our ``setup.py``'s ``install_requires`` as it triggers occasional bugs in some versions of ``setuptools``. Thanks to Justin Lecher for catch & original patch. -* :bug:`499` Strip trailing/leading whitespace from lines when parsing SSH +- :bug:`499` Strip trailing/leading whitespace from lines when parsing SSH config files - this brings things in line with OpenSSH behavior. Thanks to Alfredo Esteban for the original report and Nick Pillitteri for the patch. -* :bug:`652` Fix behavior of ``gssapi-with-mic`` auth requests so they fail +- :bug:`652` Fix behavior of ``gssapi-with-mic`` auth requests so they fail gracefully (allowing followup via other auth methods) instead of raising an exception. Patch courtesy of ``@jamercee``. -* :feature:`588 (==1.17)` Add missing file-like object methods for +- :feature:`588 (==1.17)` Add missing file-like object methods for `~paramiko.file.BufferedFile` and `~paramiko.sftp_file.SFTPFile`. Thanks to Adam Meily for the patch. -* :support:`636 backported (>=1.15,<2.0)` Clean up and enhance the README (and +- :support:`636 backported (>=1.15,<2.0)` Clean up and enhance the README (and rename it to ``README.rst`` from just ``README``). Thanks to ``@LucasRMehl``. -* :release:`1.16.0 <2015-11-04>` -* :bug:`194 major` (also :issue:`562`, :issue:`530`, :issue:`576`) Streamline +- :release:`1.16.0 <2015-11-04>` +- :bug:`194 major` (also :issue:`562`, :issue:`530`, :issue:`576`) Streamline use of ``stat`` when downloading SFTP files via `SFTPClient.get `; this avoids triggering bugs in some off-spec SFTP servers such as IBM Sterling. Thanks to ``@muraleee`` for the initial report and to Torkil Gustavsen for the patch. -* :feature:`467` (also :issue:`139`, :issue:`412`) Fully enable two-factor +- :feature:`467` (also :issue:`139`, :issue:`412`) Fully enable two-factor authentication (e.g. when a server requires ``AuthenticationMethods pubkey,keyboard-interactive``). Thanks to ``@perryjrandall`` for the patch and to ``@nevins-b`` and Matt Robenolt for additional support. -* :bug:`502 major` Fix 'exec' requests in server mode to use ``get_string`` +- :bug:`502 major` Fix 'exec' requests in server mode to use ``get_string`` instead of ``get_text`` to avoid ``UnicodeDecodeError`` on non-UTF-8 input. Thanks to Anselm Kruis for the patch & discussion. -* :bug:`401` Fix line number reporting in log output regarding invalid +- :bug:`401` Fix line number reporting in log output regarding invalid ``known_hosts`` line entries. Thanks to Dylan Thacker-Smith for catch & patch. -* :support:`525 backported` Update the vendored Windows API addon to a more +- :support:`525 backported` Update the vendored Windows API addon to a more recent edition. 
Also fixes :issue:`193`, :issue:`488`, :issue:`498`. Thanks to Jason Coombs. -* :release:`1.15.4 <2015-11-02>` -* :release:`1.14.3 <2015-11-02>` -* :release:`1.13.4 <2015-11-02>` -* :bug:`366` Fix `~paramiko.sftp_attributes.SFTPAttributes` so its string +- :release:`1.15.4 <2015-11-02>` +- :release:`1.14.3 <2015-11-02>` +- :release:`1.13.4 <2015-11-02>` +- :bug:`366` Fix `~paramiko.sftp_attr.SFTPAttributes` so its string representation doesn't raise exceptions on empty/initialized instances. Patch by Ulrich Petri. -* :bug:`359` Use correct attribute name when trying to use Python 3's +- :bug:`359` Use correct attribute name when trying to use Python 3's ``int.bit_length`` method; prior to fix, the Python 2 custom fallback implementation was always used, even on Python 3. Thanks to Alex Gaynor. -* :support:`594 backported` Correct some post-Python3-port docstrings to +- :support:`594 backported` Correct some post-Python3-port docstrings to specify ``bytes`` type instead of ``str``. Credit to ``@redixin``. -* :bug:`565` Don't explode with ``IndexError`` when reading private key files +- :bug:`565` Don't explode with ``IndexError`` when reading private key files lacking an ``-----END PRIVATE KEY-----`` footer. Patch courtesy of Prasanna Santhanam. -* :feature:`604` Add support for the ``aes192-ctr`` and ``aes192-cbc`` ciphers. +- :feature:`604` Add support for the ``aes192-ctr`` and ``aes192-cbc`` ciphers. Thanks to Michiel Tiller for noticing it was as easy as tweaking some key sizes :D -* :feature:`356` (also :issue:`596`, :issue:`365`, :issue:`341`, :issue:`164`, +- :feature:`356` (also :issue:`596`, :issue:`365`, :issue:`341`, :issue:`164`, :issue:`581`, and a bunch of other duplicates besides) Add support for SHA-2 based key exchange (kex) algorithm ``diffie-hellman-group-exchange-sha256`` and (H)MAC algorithms ``hmac-sha2-256`` and ``hmac-sha2-512``. @@ -167,98 +943,99 @@ Changelog and in no particular order: Matthias Witte, Dag Wieers, Ash Berlin, Etienne Perot, Gert van Dijk, ``@GuyShaanan``, Aaron Bieber, ``@cyphase``, and Eric Brown. -* :release:`1.15.3 <2015-10-02>` -* :support:`554 backported` Fix inaccuracies in the docstring for the ECDSA key +- :release:`1.15.3 <2015-10-02>` +- :support:`554 backported` Fix inaccuracies in the docstring for the ECDSA key class. Thanks to Jared Hance for the patch. -* :support:`516 backported` Document `~paramiko.agent.AgentRequestHandler`. +- :support:`516 backported` Document `~paramiko.agent.AgentRequestHandler`. Thanks to ``@toejough`` for report & suggestions. -* :bug:`496 (1.15+)` Fix a handful of small but critical bugs in Paramiko's +- :bug:`496 (1.15+)` Fix a handful of small but critical bugs in Paramiko's GSSAPI support (note: this includes switching from PyCrypo's Random to `os.urandom`). Thanks to Anselm Kruis for catch & patch. -* :bug:`491` (combines :issue:`62` and :issue:`439`) Implement timeout +- :bug:`491` (combines :issue:`62` and :issue:`439`) Implement timeout functionality to address hangs from dropped network connections and/or failed handshakes. Credit to ``@vazir`` and ``@dacut`` for the original patches and to Olle Lundberg for reimplementation. -* :bug:`490` Skip invalid/unparseable lines in ``known_hosts`` files, instead +- :bug:`490` Skip invalid/unparseable lines in ``known_hosts`` files, instead of raising `~paramiko.ssh_exception.SSHException`. This brings Paramiko's behavior more in line with OpenSSH, which silently ignores such input. Catch & patch courtesy of Martin Topholm. 
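One way to exercise the newer kex/MAC/cipher names listed above is through ``Transport``'s security options; a sketch only, with placeholder host and credentials, and the tuples must match what both ends actually advertise::

    import paramiko

    transport = paramiko.Transport(("ssh.example.com", 22))
    opts = transport.get_security_options()
    # Pin negotiation to the SHA-2 based algorithms added in this era.
    opts.kex = ("diffie-hellman-group-exchange-sha256",)
    opts.digests = ("hmac-sha2-256", "hmac-sha2-512")
    opts.ciphers = ("aes192-ctr", "aes256-ctr")
    transport.connect(username="deploy", password="secret")
    transport.close()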
-* :bug:`404` Print details when displaying +- :bug:`404` Print details when displaying `~paramiko.ssh_exception.BadHostKeyException` objects (expected vs received data) instead of just "hey shit broke". Patch credit: Loic Dachary. -* :bug:`469` (also :issue:`488`, :issue:`461` and like a dozen others) Fix a +- :bug:`469` (also :issue:`488`, :issue:`461` and like a dozen others) Fix a typo introduced in the 1.15 release which broke WinPageant support. Thanks to everyone who submitted patches, and to Steve Cohen who was the lucky winner of the cherry-pick lottery. -* :bug:`353` (via :issue:`482`) Fix a bug introduced in the Python 3 port +- :bug:`353` (via :issue:`482`) Fix a bug introduced in the Python 3 port which caused ``OverFlowError`` (and other symptoms) in SFTP functionality. Thanks to ``@dboreham`` for leading the troubleshooting charge, and to Scott Maxwell for the final patch. -* :support:`582` Fix some old ``setup.py`` related helper code which was +- :support:`582` Fix some old ``setup.py`` related helper code which was breaking ``bdist_dumb`` on Mac OS X. Thanks to Peter Odding for the patch. -* :bug:`22 major` Try harder to connect to multiple network families (e.g. IPv4 +- :bug:`22 major` Try harder to connect to multiple network families (e.g. IPv4 vs IPv6) in case of connection issues; this helps with problems such as hosts which resolve both IPv4 and IPv6 addresses but are only listening on IPv4. Thanks to Dries Desmet for original report and Torsten Landschoff for the foundational patchset. -* :bug:`402` Check to see if an SSH agent is actually present before trying to +- :bug:`402` Check to see if an SSH agent is actually present before trying to forward it to the remote end. This replaces what was usually a useless ``TypeError`` with a human-readable `~paramiko.ssh_exception.AuthenticationException`. Credit to Ken Jordan for the fix and Yvan Marques for original report. -* :release:`1.15.2 <2014-12-19>` -* :release:`1.14.2 <2014-12-19>` -* :release:`1.13.3 <2014-12-19>` -* :bug:`413` (also :issue:`414`, :issue:`420`, :issue:`454`) Be significantly +- :release:`1.15.2 <2014-12-19>` +- :release:`1.14.2 <2014-12-19>` +- :release:`1.13.3 <2014-12-19>` +- :bug:`413` (also :issue:`414`, :issue:`420`, :issue:`454`) Be significantly smarter about polling & timing behavior when running proxy commands, to avoid unnecessary (often 100%!) CPU usage. Major thanks to Jason Dunsmore for report & initial patchset and to Chris Adams & John Morrissey for followup improvements. -* :bug:`455` Tweak packet size handling to conform better to the OpenSSH RFCs; +- :bug:`455` Tweak packet size handling to conform better to the OpenSSH RFCs; this helps address issues with interactive program cursors. Courtesy of Jeff Quast. -* :bug:`428` Fix an issue in `~paramiko.file.BufferedFile` (primarily used in +- :bug:`428` Fix an issue in `~paramiko.file.BufferedFile` (primarily used in the SFTP modules) concerning incorrect behavior by `~paramiko.file.BufferedFile.readlines` on files whose size exceeds the buffer size. Thanks to ``@achapp`` for catch & patch. -* :bug:`415` Fix ``ssh_config`` parsing to correctly interpret ``ProxyCommand +- :bug:`415` Fix ``ssh_config`` parsing to correctly interpret ``ProxyCommand none`` as the lack of a proxy command, instead of as a literal command string of ``"none"``. Thanks to Richard Spiers for the catch & Sean Johnson for the fix. 
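The ``ProxyCommand none`` handling from :issue:`415` is visible when parsing a config by hand; a sketch with a placeholder config path and hostname::

    import paramiko

    config = paramiko.SSHConfig()
    with open("/home/deploy/.ssh/config") as f:
        config.parse(f)
    settings = config.lookup("internal.example.com")
    # "ProxyCommand none" now yields no 'proxycommand' key at all, rather than
    # the literal command string "none".
    if "proxycommand" in settings:
        sock = paramiko.ProxyCommand(settings["proxycommand"])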
-* :support:`431 backported` Replace handrolled ``ssh_config`` parsing code with +- :support:`431 backported` Replace handrolled ``ssh_config`` parsing code with use of the ``shlex`` module. Thanks to Yan Kalchevskiy. -* :support:`422 backported` Clean up some unused imports. Courtesy of Olle +- :support:`422 backported` Clean up some unused imports. Courtesy of Olle Lundberg. -* :support:`421 backported` Modernize threading calls to use newer API. Thanks +- :support:`421 backported` Modernize threading calls to use newer API. Thanks to Olle Lundberg. -* :support:`419 backported` Modernize a bunch of the codebase internals to +- :support:`419 backported` Modernize a bunch of the codebase internals to leverage decorators. Props to ``@beckjake`` for realizing we're no longer on Python 2.2 :D -* :bug:`266` Change numbering of `~paramiko.transport.Transport` channels to +- :bug:`266` Change numbering of `~paramiko.transport.Transport` channels to start at 0 instead of 1 for better compatibility with OpenSSH & certain server implementations which break on 1-indexed channels. Thanks to ``@egroeper`` for catch & patch. -* :bug:`459` Tighten up agent connection closure behavior to avoid spurious +- :bug:`459` Tighten up agent connection closure behavior to avoid spurious ``ResourceWarning`` display in some situations. Thanks to ``@tkrapp`` for the catch. -* :bug:`429` Server-level debug message logging was overlooked during the +- :bug:`429` Server-level debug message logging was overlooked during the Python 3 compatibility update; Python 3 clients attempting to log SSH debug packets encountered type errors. This is now fixed. Thanks to ``@mjmaenpaa`` for the catch. -* :bug:`320` Update our win_pageant module to be Python 3 compatible. Thanks to +- :bug:`320` Update our win_pageant module to be Python 3 compatible. Thanks to ``@sherbang`` and ``@adamkerz`` for the patches. -* :release:`1.15.1 <2014-09-22>` -* :bug:`399` SSH agent forwarding (potentially other functionality as +- :release:`1.15.1 <2014-09-22>` +- :bug:`399` SSH agent forwarding (potentially other functionality as well) would hang due to incorrect values passed into the new window size - arguments for `.Transport` (thanks to a botched merge). This has been - corrected. Thanks to Dylan Thacker-Smith for the report & patch. -* :feature:`167` Add `~paramiko.config.SSHConfig.get_hostnames` for easier + arguments for `~paramiko.transport.Transport` (thanks to a botched merge). + This has been corrected. Thanks to Dylan Thacker-Smith for the report & + patch. +- :feature:`167` Add `~paramiko.config.SSHConfig.get_hostnames` for easier introspection of a loaded SSH config file or object. Courtesy of Søren Løvborg. -* :release:`1.15.0 <2014-09-18>` -* :support:`393` Replace internal use of PyCrypto's ``SHA.new`` with the +- :release:`1.15.0 <2014-09-18>` +- :support:`393` Replace internal use of PyCrypto's ``SHA.new`` with the stdlib's ``hashlib.sha1``. Thanks to Alex Gaynor. -* :feature:`267` (also :issue:`250`, :issue:`241`, :issue:`228`) Add GSS-API / +- :feature:`267` (also :issue:`250`, :issue:`241`, :issue:`228`) Add GSS-API / SSPI (e.g. Kerberos) key exchange and authentication support (:ref:`installation docs here `). Mega thanks to Sebastian Deiß, with assist by Torsten Landschoff. @@ -268,24 +1045,24 @@ Changelog requirement for using this functionality) only appears to support Python 2.7 and up at this time. 
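A hedged sketch of the GSS-API support referenced above; it assumes a working Kerberos environment plus the optional dependencies described in the install docs, and the hostname/username are placeholders::

    import paramiko

    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.connect(
        "kerberized.example.com",
        username="alice",
        gss_auth=True,         # gssapi-with-mic authentication
        gss_kex=True,          # GSS-API key exchange
        gss_deleg_creds=True,  # delegate credentials to the remote host
    )
    client.close()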
-* :bug:`346 major` Fix an issue in private key files' encryption salts that +- :bug:`346 major` Fix an issue in private key files' encryption salts that could cause tracebacks and file corruption if keys were re-encrypted. Credit to Xavier Nunn. -* :feature:`362` Allow users to control the SSH banner timeout. Thanks to Cory +- :feature:`362` Allow users to control the SSH banner timeout. Thanks to Cory Benfield. -* :feature:`372` Update default window & packet sizes to more closely adhere to +- :feature:`372` Update default window & packet sizes to more closely adhere to the pertinent RFC; also expose these settings in the public API so they may be overridden by client code. This should address some general speed issues such as :issue:`175`. Big thanks to Olle Lundberg for the update. -* :bug:`373 major` Attempt to fix a handful of issues (such as :issue:`354`) +- :bug:`373 major` Attempt to fix a handful of issues (such as :issue:`354`) related to infinite loops and threading deadlocks. Thanks to Olle Lundberg as well as a handful of community members who provided advice & feedback via IRC. -* :support:`374` (also :issue:`375`) Old code cleanup courtesy of Olle +- :support:`374` (also :issue:`375`) Old code cleanup courtesy of Olle Lundberg. -* :support:`377` Factor `~paramiko.channel.Channel` openness sanity check into +- :support:`377` Factor `~paramiko.channel.Channel` openness sanity check into a decorator. Thanks to Olle Lundberg for original patch. -* :bug:`298 major` Don't perform point validation on ECDSA keys in +- :bug:`298 major` Don't perform point validation on ECDSA keys in ``known_hosts`` files, since a) this can cause significant slowdown when such keys exist, and b) ``known_hosts`` files are implicitly trustworthy. Thanks to Kieran Spear for catch & patch. @@ -294,56 +1071,56 @@ Changelog This change bumps up the version requirement for the ``ecdsa`` library to ``0.11``. -* :bug:`234 major` Lower logging levels for a few overly-noisy log messages +- :bug:`234 major` Lower logging levels for a few overly-noisy log messages about secure channels. Thanks to David Pursehouse for noticing & contributing the fix. -* :feature:`218` Add support for ECDSA private keys on the client side. Thanks +- :feature:`218` Add support for ECDSA private keys on the client side. Thanks to ``@aszlig`` for the patch. -* :bug:`335 major` Fix ECDSA key generation (generation of brand new ECDSA keys +- :bug:`335 major` Fix ECDSA key generation (generation of brand new ECDSA keys was broken previously). Thanks to ``@solarw`` for catch & patch. -* :feature:`184` Support quoted values in SSH config file parsing. Credit to +- :feature:`184` Support quoted values in SSH config file parsing. Credit to Yan Kalchevskiy. -* :feature:`131` Add a `~paramiko.sftp_client.SFTPClient.listdir_iter` method +- :feature:`131` Add a `~paramiko.sftp_client.SFTPClient.listdir_iter` method to `~paramiko.sftp_client.SFTPClient` allowing for more efficient, async/generator based file listings. Thanks to John Begeman. -* :support:`378 backported` Minor code cleanup in the SSH config module +- :support:`378 backported` Minor code cleanup in the SSH config module courtesy of Olle Lundberg. -* :support:`249 backported` Consolidate version information into one spot. +- :support:`249 backported` Consolidate version information into one spot. Thanks to Gabi Davar for the reminder. 
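``SFTPClient.listdir_iter`` from :issue:`131` in use, assuming ``sftp`` is an open SFTP session and the directory is a placeholder::

    for attr in sftp.listdir_iter("/var/log"):
        # Entries are SFTPAttributes objects, yielded as responses arrive
        # instead of only after the entire listing has been buffered.
        print(attr.filename, attr.st_size)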
-* :release:`1.14.1 <2014-08-25>` -* :release:`1.13.2 <2014-08-25>` -* :bug:`376` Be less aggressive about expanding variables in ``ssh_config`` +- :release:`1.14.1 <2014-08-25>` +- :release:`1.13.2 <2014-08-25>` +- :bug:`376` Be less aggressive about expanding variables in ``ssh_config`` files, which results in a speedup of SSH config parsing. Credit to Olle Lundberg. -* :support:`324 backported` A bevvy of documentation typo fixes, courtesy of Roy +- :support:`324 backported` A bevvy of documentation typo fixes, courtesy of Roy Wellington. -* :bug:`312` `paramiko.transport.Transport` had a bug in its ``__repr__`` which +- :bug:`312` `paramiko.transport.Transport` had a bug in its ``__repr__`` which surfaces during errors encountered within its ``__init__``, causing problematic tracebacks in such situations. Thanks to Simon Percivall for catch & patch. -* :bug:`272` Fix a bug where ``known_hosts`` parsing hashed the input hostname +- :bug:`272` Fix a bug where ``known_hosts`` parsing hashed the input hostname as well as the hostnames from the ``known_hosts`` file, on every comparison. Thanks to ``@sigmunau`` for final patch and ``@ostacey`` for the original report. -* :bug:`239` Add Windows-style CRLF support to SSH config file parsing. Props +- :bug:`239` Add Windows-style CRLF support to SSH config file parsing. Props to Christopher Swenson. -* :support:`229 backported` Fix a couple of incorrectly-copied docstrings' ``.. +- :support:`229 backported` Fix a couple of incorrectly-copied docstrings' ``.. versionadded::`` RST directives. Thanks to Aarni Koskela for the catch. -* :support:`169 backported` Minor refactor of +- :support:`169 backported` Minor refactor of `paramiko.sftp_client.SFTPClient.put` thanks to Abhinav Upadhyay. -* :bug:`285` (also :issue:`352`) Update our Python 3 ``b()`` compatibility shim +- :bug:`285` (also :issue:`352`) Update our Python 3 ``b()`` compatibility shim to handle ``buffer`` objects correctly; this fixes a frequently reported issue affecting many users, including users of the ``bzr`` software suite. Thanks to ``@basictheprogram`` for the initial report, Jelmer Vernooij for the fix and Andrew Starr-Bochicchio & Jeremy T. Bouse (among others) for discussion & feedback. -* :support:`371` Add Travis support & docs update for Python 3.4. Thanks to +- :support:`371` Add Travis support & docs update for Python 3.4. Thanks to Olle Lundberg. -* :release:`1.14.0 <2014-05-07>` -* :release:`1.13.1 <2014-05-07>` -* :release:`1.12.4 <2014-05-07>` -* :release:`1.11.6 <2014-05-07>` -* :bug:`-` `paramiko.file.BufferedFile.read` incorrectly returned text strings +- :release:`1.14.0 <2014-05-07>` +- :release:`1.13.1 <2014-05-07>` +- :release:`1.12.4 <2014-05-07>` +- :release:`1.11.6 <2014-05-07>` +- :bug:`-` `paramiko.file.BufferedFile.read` incorrectly returned text strings after the Python 3 migration, despite bytes being more appropriate for file contents (which may be binary or of an unknown encoding.) This has been addressed. @@ -356,33 +1133,33 @@ Changelog This should fix `this issue raised on the Obnam mailing list `_. Thanks to Antoine Brenner for the patch. -* :bug:`-` Added self.args for exception classes. Used for unpickling. Related +- :bug:`-` Added self.args for exception classes. Used for unpickling. Related to (`Fabric #986 `_, `Fabric #714 `_). Thanks to Alex Plugaru. -* :bug:`-` Fix logging error in sftp_client for filenames containing the '%' +- :bug:`-` Fix logging error in sftp_client for filenames containing the '%' character. 
Thanks to Antoine Brenner. -* :bug:`308` Fix regression in dsskey.py that caused sporadic signature +- :bug:`308` Fix regression in dsskey.py that caused sporadic signature verification failures. Thanks to Chris Rose. -* :support:`299` Use deterministic signatures for ECDSA keys for improved +- :support:`299` Use deterministic signatures for ECDSA keys for improved security. Thanks to Alex Gaynor. -* :support:`297` Replace PyCrypto's ``Random`` with `os.urandom` for improved +- :support:`297` Replace PyCrypto's ``Random`` with `os.urandom` for improved speed and security. Thanks again to Alex. -* :support:`295` Swap out a bunch of PyCrypto hash functions with use of +- :support:`295` Swap out a bunch of PyCrypto hash functions with use of `hashlib`. Thanks to Alex Gaynor. -* :support:`290` (also :issue:`292`) Add support for building universal +- :support:`290` (also :issue:`292`) Add support for building universal (Python 2+3 compatible) wheel files during the release process. Courtesy of Alex Gaynor. -* :support:`284` Add Python language trove identifiers to ``setup.py``. Thanks +- :support:`284` Add Python language trove identifiers to ``setup.py``. Thanks to Alex Gaynor for catch & patch. -* :bug:`235` Improve string type testing in a handful of spots (e.g. ``s/if +- :bug:`235` Improve string type testing in a handful of spots (e.g. ``s/if type(x) is str/if isinstance(x, basestring)/g``.) Thanks to ``@ksamuel`` for the report. -* :release:`1.13.0 <2014-03-13>` -* :release:`1.12.3 <2014-03-13>` -* :release:`1.11.5 <2014-03-13>` -* :release:`1.10.7 <2014-03-13>` -* :feature:`16` **Python 3 support!** Our test suite passes under Python 3, and +- :release:`1.13.0 <2014-03-13>` +- :release:`1.12.3 <2014-03-13>` +- :release:`1.11.5 <2014-03-13>` +- :release:`1.10.7 <2014-03-13>` +- :feature:`16` **Python 3 support!** Our test suite passes under Python 3, and it (& Fabric's test suite) continues to pass under Python 2. **Python 2.5 is no longer supported with this change!** @@ -390,44 +1167,44 @@ Changelog feedback. In no particular order, we thank Daniel Goertzen, Ivan Kolodyazhny, Tomi Pieviläinen, Jason R. Coombs, Jan N. Schulze, ``@Lazik``, Dorian Pula, Scott Maxwell, Tshepang Lekhonkhobe, Aaron Meurer, and Dave Halter. -* :support:`256 backported` Convert API documentation to Sphinx, yielding a new +- :support:`256 backported` Convert API documentation to Sphinx, yielding a new API docs website to replace the old Epydoc one. Thanks to Olle Lundberg for the initial conversion work. -* :bug:`-` Use constant-time hash comparison operations where possible, to +- :bug:`-` Use constant-time hash comparison operations where possible, to protect against `timing-based attacks `_. Thanks to Alex Gaynor for the patch. -* :release:`1.12.2 <2014-02-14>` -* :release:`1.11.4 <2014-02-14>` -* :release:`1.10.6 <2014-02-14>` -* :feature:`58` Allow client code to access the stored SSH server banner via +- :release:`1.12.2 <2014-02-14>` +- :release:`1.11.4 <2014-02-14>` +- :release:`1.10.6 <2014-02-14>` +- :feature:`58` Allow client code to access the stored SSH server banner via `Transport.get_banner `. Thanks to ``@Jhoanor`` for the patch. -* :bug:`252` (`Fabric #1020 `_) +- :bug:`252` (`Fabric #1020 `_) Enhanced the implementation of ``ProxyCommand`` to avoid a deadlock/hang condition that frequently occurs at ``Transport`` shutdown time. Thanks to Mateusz Kobos, Matthijs van der Vleuten and Guillaume Zitta for the original reports and to Marius Gedminas for helping test nontrivial use cases. 
-* :bug:`268` Fix some missed renames of ``ProxyCommand`` related error classes. +- :bug:`268` Fix some missed renames of ``ProxyCommand`` related error classes. Thanks to Marius Gedminas for catch & patch. -* :bug:`34` (PR :issue:`35`) Fix SFTP prefetching incompatibility with some +- :bug:`34` (PR :issue:`35`) Fix SFTP prefetching incompatibility with some SFTP servers regarding request/response ordering. Thanks to Richard Kettlewell. -* :bug:`193` (and its attentant PRs :issue:`230` & :issue:`253`) Fix SSH agent +- :bug:`193` (and its attentant PRs :issue:`230` & :issue:`253`) Fix SSH agent problems present on Windows. Thanks to David Hobbs for initial report and to Aarni Koskela & Olle Lundberg for the patches. -* :release:`1.12.1 <2014-01-08>` -* :release:`1.11.3 <2014-01-08>` -* :release:`1.10.5 <2014-01-08>` -* :bug:`225 (1.12+)` Note ecdsa requirement in README. Thanks to Amaury +- :release:`1.12.1 <2014-01-08>` +- :release:`1.11.3 <2014-01-08>` +- :release:`1.10.5 <2014-01-08>` +- :bug:`225 (1.12+)` Note ecdsa requirement in README. Thanks to Amaury Rodriguez for the catch. -* :bug:`176` Fix AttributeError bugs in known_hosts file (re)loading. Thanks +- :bug:`176` Fix AttributeError bugs in known_hosts file (re)loading. Thanks to Nathan Scowcroft for the patch & Martin Blumenstingl for the initial test case. -* :release:`1.12.0 <2013-09-27>` -* :release:`1.11.2 <2013-09-27>` -* :release:`1.10.4 <2013-09-27>` -* :feature:`152` Add tentative support for ECDSA keys. **This adds the ecdsa +- :release:`1.12.0 <2013-09-27>` +- :release:`1.11.2 <2013-09-27>` +- :release:`1.10.4 <2013-09-27>` +- :feature:`152` Add tentative support for ECDSA keys. **This adds the ecdsa module as a new dependency of Paramiko.** The module is available at `warner/python-ecdsa on Github `_ and `ecdsa on PyPI `_. @@ -437,88 +1214,88 @@ Changelog what you have in your known_hosts file. * Mega thanks to Ethan Glasser-Camp for the patch. -* :feature:`136` Add server-side support for the SSH protocol's 'env' command. +- :feature:`136` Add server-side support for the SSH protocol's 'env' command. Thanks to Benjamin Pollack for the patch. -* :bug:`156 (1.11+)` Fix potential deadlock condition when using Channel +- :bug:`156 (1.11+)` Fix potential deadlock condition when using Channel objects as sockets (e.g. when using SSH gatewaying). Thanks to Steven Noonan and Frank Arnold for catch & patch. -* :bug:`179` Fix a missing variable causing errors when an ssh_config file has +- :bug:`179` Fix a missing variable causing errors when an ssh_config file has a non-default AddressFamily set. Thanks to Ed Marshall & Tomaz Muraus for catch & patch. -* :bug:`200` Fix an exception-causing typo in ``demo_simple.py``. Thanks to Alex +- :bug:`200` Fix an exception-causing typo in ``demo_simple.py``. Thanks to Alex Buchanan for catch & Dave Foster for patch. -* :bug:`199` Typo fix in the license header cross-project. Thanks to Armin +- :bug:`199` Typo fix in the license header cross-project. Thanks to Armin Ronacher for catch & patch. -* :release:`1.11.1 <2013-09-20>` -* :release:`1.10.3 <2013-09-20>` -* :bug:`162` Clean up HMAC module import to avoid deadlocks in certain uses of +- :release:`1.11.1 <2013-09-20>` +- :release:`1.10.3 <2013-09-20>` +- :bug:`162` Clean up HMAC module import to avoid deadlocks in certain uses of SSHClient. Thanks to Gernot Hillier for the catch & suggested fix. -* :bug:`36` Fix the port-forwarding demo to avoid file descriptor errors. 
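``Transport.get_banner`` from :issue:`58` in use, assuming ``client`` is a connected ``SSHClient``::

    banner = client.get_transport().get_banner()
    # None when the server sent no pre-auth banner.
    if banner:
        print(banner if isinstance(banner, str) else banner.decode("utf-8", "replace"))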
+- :bug:`36` Fix the port-forwarding demo to avoid file descriptor errors. Thanks to Jonathan Halcrow for catch & patch. -* :bug:`168` Update config handling to properly handle multiple 'localforward' +- :bug:`168` Update config handling to properly handle multiple 'localforward' and 'remoteforward' keys. Thanks to Emre Yılmaz for the patch. -* :release:`1.11.0 <2013-07-26>` -* :release:`1.10.2 <2013-07-26>` -* :bug:`98 major` On Windows, when interacting with the PuTTY PAgeant, Paramiko +- :release:`1.11.0 <2013-07-26>` +- :release:`1.10.2 <2013-07-26>` +- :bug:`98 major` On Windows, when interacting with the PuTTY PAgeant, Paramiko now creates the shared memory map with explicit Security Attributes of the user, which is the same technique employed by the canonical PuTTY library to avoid permissions issues when Paramiko is running under a different UAC context than the PuTTY Ageant process. Thanks to Jason R. Coombs for the patch. -* :support:`100` Remove use of PyWin32 in ``win_pageant`` module. Module was +- :support:`100` Remove use of PyWin32 in ``win_pageant`` module. Module was already dependent on ctypes for constructing appropriate structures and had ctypes implementations of all functionality. Thanks to Jason R. Coombs for the patch. -* :bug:`87 major` Ensure updates to ``known_hosts`` files account for any +- :bug:`87 major` Ensure updates to ``known_hosts`` files account for any updates to said files after Paramiko initially read them. (Includes related fix to guard against duplicate entries during subsequent ``known_hosts`` loads.) Thanks to ``@sunweaver`` for the contribution. -* :bug:`153` (also :issue:`67`) Warn on parse failure when reading known_hosts +- :bug:`153` (also :issue:`67`) Warn on parse failure when reading known_hosts file. Thanks to ``@glasserc`` for patch. -* :bug:`146` Indentation fixes for readability. Thanks to Abhinav Upadhyay for +- :bug:`146` Indentation fixes for readability. Thanks to Abhinav Upadhyay for catch & patch. -* :release:`1.10.1 <2013-04-05>` -* :bug:`142` (`Fabric #811 `_) +- :release:`1.10.1 <2013-04-05>` +- :bug:`142` (`Fabric #811 `_) SFTP put of empty file will still return the attributes of the put file. Thanks to Jason R. Coombs for the patch. -* :bug:`154` (`Fabric #876 `_) +- :bug:`154` (`Fabric #876 `_) Forwarded SSH agent connections left stale local pipes lying around, which could cause local (and sometimes remote or network) resource starvation when running many agent-using remote commands. Thanks to Kevin Tegtmeier for catch & patch. -* :release:`1.10.0 <2013-03-01>` -* :feature:`66` Batch SFTP writes to help speed up file transfers. Thanks to +- :release:`1.10.0 <2013-03-01>` +- :feature:`66` Batch SFTP writes to help speed up file transfers. Thanks to Olle Lundberg for the patch. -* :bug:`133 major` Fix handling of window-change events to be on-spec and not +- :bug:`133 major` Fix handling of window-change events to be on-spec and not attempt to wait for a response from the remote sshd; this fixes problems with less common targets such as some Cisco devices. Thanks to Phillip Heller for catch & patch. -* :feature:`93` Overhaul SSH config parsing to be in line with ``man +- :feature:`93` Overhaul SSH config parsing to be in line with ``man ssh_config`` (& the behavior of ``ssh`` itself), including addition of parameter expansion within config values. Thanks to Olle Lundberg for the patch. 
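The "Channel objects as sockets" gatewaying pattern mentioned in :issue:`156`, sketched with placeholder hosts and an existing connected ``gateway`` ``SSHClient``::

    import paramiko

    # Open a direct-tcpip channel through the gateway and hand it to a second
    # client as its transport socket.
    channel = gateway.get_transport().open_channel(
        "direct-tcpip", ("internal.example.com", 22), ("", 0)
    )
    inner = paramiko.SSHClient()
    inner.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    inner.connect("internal.example.com", username="deploy", sock=channel)
    inner.close()
    gateway.close()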
-* :feature:`110` Honor SSH config ``AddressFamily`` setting when looking up +- :feature:`110` Honor SSH config ``AddressFamily`` setting when looking up local host's FQDN. Thanks to John Hensley for the patch. -* :feature:`128` Defer FQDN resolution until needed, when parsing SSH config +- :feature:`128` Defer FQDN resolution until needed, when parsing SSH config files. Thanks to Parantapa Bhattacharya for catch & patch. -* :bug:`102 major` Forego random padding for packets when running under +- :bug:`102 major` Forego random padding for packets when running under ``*-ctr`` ciphers. This corrects some slowdowns on platforms where random byte generation is inefficient (e.g. Windows). Thanks to ``@warthog618`` for catch & patch, and Michael van der Kolff for code/technique review. -* :feature:`127` Turn ``SFTPFile`` into a context manager. Thanks to Michael +- :feature:`127` Turn ``SFTPFile`` into a context manager. Thanks to Michael Williamson for the patch. -* :feature:`116` Limit ``Message.get_bytes`` to an upper bound of 1MB to protect +- :feature:`116` Limit ``Message.get_bytes`` to an upper bound of 1MB to protect against potential DoS vectors. Thanks to ``@mvschaik`` for catch & patch. -* :feature:`115` Add convenience ``get_pty`` kwarg to ``Client.exec_command`` so +- :feature:`115` Add convenience ``get_pty`` kwarg to ``Client.exec_command`` so users not manually controlling a channel object can still toggle PTY creation. Thanks to Michael van der Kolff for the patch. -* :feature:`71` Add ``SFTPClient.putfo`` and ``.getfo`` methods to allow direct +- :feature:`71` Add ``SFTPClient.putfo`` and ``.getfo`` methods to allow direct uploading/downloading of file-like objects. Thanks to Eric Buehl for the patch. -* :feature:`113` Add ``timeout`` parameter to ``SSHClient.exec_command`` for +- :feature:`113` Add ``timeout`` parameter to ``SSHClient.exec_command`` for easier setting of the command's internal channel object's timeout. Thanks to Cernov Vladimir for the patch. -* :support:`94` Remove duplication of SSH port constant. Thanks to Olle +- :support:`94` Remove duplication of SSH port constant. Thanks to Olle Lundberg for the catch. -* :feature:`80` Expose the internal "is closed" property of the file transfer +- :feature:`80` Expose the internal "is closed" property of the file transfer class ``BufferedFile`` as ``.closed``, better conforming to Python's file interface. Thanks to ``@smunaut`` and James Hiscock for catch & patch. diff --git a/sites/www/conf.py b/sites/www/conf.py index c7ba0a867..00944871f 100644 --- a/sites/www/conf.py +++ b/sites/www/conf.py @@ -3,22 +3,22 @@ import os from os.path import abspath, join, dirname -sys.path.append(abspath(join(dirname(__file__), '..'))) +sys.path.append(abspath(join(dirname(__file__), ".."))) from shared_conf import * # Releases changelog extension -extensions.append('releases') +extensions.append("releases") releases_release_uri = "https://github.com/paramiko/paramiko/tree/%s" releases_issue_uri = "https://github.com/paramiko/paramiko/issues/%s" # Default is 'local' building, but reference the public docs site when building # under RTD. 
-target = join(dirname(__file__), '..', 'docs', '_build') -if os.environ.get('READTHEDOCS') == 'True': - target = 'http://docs.paramiko.org/en/latest/' -intersphinx_mapping['docs'] = (target, None) +target = join(dirname(__file__), "..", "docs", "_build") +if os.environ.get("READTHEDOCS") == "True": + target = "http://docs.paramiko.org/en/latest/" +intersphinx_mapping["docs"] = (target, None) # Sister-site links to API docs -html_theme_options['extra_nav_links'] = { - "API Docs": 'http://docs.paramiko.org', +html_theme_options["extra_nav_links"] = { + "API Docs": "http://docs.paramiko.org" } diff --git a/sites/www/contact.rst b/sites/www/contact.rst index 7e6c947e0..202815f8d 100644 --- a/sites/www/contact.rst +++ b/sites/www/contact.rst @@ -5,8 +5,8 @@ Contact You can get in touch with the developer & user community in any of the following ways: -* IRC: ``#paramiko`` on Freenode -* Mailing list: ``paramiko@librelist.com`` (see `the LibreList homepage - `_ for usage details). -* This website - a blog section is forthcoming. * Submit contributions on Github - see the :doc:`contributing` page. +* Follow ``@bitprophet`` on Twitter, though it's not a dedicated account and + mostly just retweets funny pictures. +* Subscribe to the ``paramiko`` category on the developer's blog: + http://bitprophet.org/categories/paramiko/ diff --git a/sites/www/index.rst b/sites/www/index.rst index b09ab5898..9c3bb3aae 100644 --- a/sites/www/index.rst +++ b/sites/www/index.rst @@ -1,18 +1,4 @@ -Welcome to Paramiko! -==================== - -Paramiko is a Python (2.6+, 3.3+) implementation of the SSHv2 protocol [#]_, -providing both client and server functionality. While it leverages a Python C -extension for low level cryptography -(`Cryptography `_), Paramiko itself is a pure Python -interface around SSH networking concepts. - -This website covers project information for Paramiko such as the changelog, -contribution guidelines, development roadmap, news/blog, and so forth. Detailed -usage and API documentation can be found at our code documentation site, -`docs.paramiko.org `_. - -Please see the sidebar to the left to begin. +.. include:: ../../README.rst .. toctree:: :hidden: @@ -20,14 +6,8 @@ Please see the sidebar to the left to begin. changelog FAQs installing + installing-1.x contributing contact -.. rubric:: Footnotes - -.. [#] - SSH is defined in :rfc:`4251`, :rfc:`4252`, :rfc:`4253` and :rfc:`4254`. The - primary working implementation of the protocol is the `OpenSSH project - `_. Paramiko implements a large portion of the SSH - feature set, but there are occasional gaps. diff --git a/sites/www/installing-1.x.rst b/sites/www/installing-1.x.rst index 0c2424bb8..7421a6c2e 100644 --- a/sites/www/installing-1.x.rst +++ b/sites/www/installing-1.x.rst @@ -1,3 +1,4 @@ +================ Installing (1.x) ================ @@ -48,6 +49,32 @@ Tools on the Mac, or the ``build-essential`` package on Ubuntu or Debian Linux -- basically, anything with ``gcc``, ``make`` and so forth) as well as the Python development libraries, often named ``python-dev`` or similar. +Slow vs fast crypto math +~~~~~~~~~~~~~~~~~~~~~~~~ + +PyCrypto attempts to use the ``gmp`` C math library if it is present on your +system, which enables what it internally calls "fastmath" (``_fastmath.so``). +When those headers are not available, it falls back to "slowmath" +(``_slowmath.py``) which is a pure-Python implementation. 
+ +Real-world tests have shown significant benefits to using the C version of this +code; thus we strongly recommend you install the ``gmp`` development headers +**before** installing Paramiko/PyCrypto. E.g.:: + + $ apt-get install libgmp-dev # or just apt + $ yum install gmp-devel # or dnf + $ brew install gmp + +If you're unsure which version of math you've ended up with, a quick way to +check is to examine whether ``_fastmath.so`` or ``_slowmath.py`` appears in the +output of:: + + from Crypto.PublicKey import RSA + print(RSA._impl._math) + +Windows +~~~~~~~ + For **Windows** users we recommend using :ref:`pypm`, installing a C development environment such as `Cygwin `_ or obtaining a precompiled Win32 PyCrypto package from `voidspace's Python modules page @@ -91,4 +118,4 @@ First, see the main install doc's notes: :ref:`gssapi` - everything there is required for Paramiko 1.x as well. Additionally, users of Paramiko 1.x, on all platforms, need a final dependency: -`pyasn1 `_ ``0.1.7`` or better. +`pyasn1 `_ ``0.1.7`` or better. diff --git a/sites/www/installing.rst b/sites/www/installing.rst index 5a41a76be..b50efc4ba 100644 --- a/sites/www/installing.rst +++ b/sites/www/installing.rst @@ -19,18 +19,34 @@ via `pip `_:: $ pip install paramiko -.. note:: - Users who want the bleeding edge can install the development version via - ``pip install paramiko==dev``. +We currently support **Python 2.7, 3.4+, and PyPy**. Users on Python 2.6 or +older (or 3.3 or older) are urged to upgrade. + +Paramiko has only a few **direct dependencies**: + +- The big one, with its own sub-dependencies, is Cryptography; see :ref:`its + specific note below ` for more details; +- `bcrypt `_, for Ed25519 key support; +- `pynacl `_, also for Ed25519 key support. -We currently support **Python 2.6, 2.7, 3.3+, and PyPy**. Users on Python 2.5 -or older (or 3.2 or older) are urged to upgrade. +There are also a number of **optional dependencies** you may install using +`setuptools 'extras' +`_: -Paramiko has only one direct hard dependency: the Cryptography library. See -:ref:`cryptography`. +.. TODO 3.0: tweak the invoke line to mention proxycommand too +.. TODO 3.0: tweak the ed25519 line to remove the caveat -If you need GSS-API / SSPI support, see :ref:`the below subsection on it -` for details on its optional dependencies. +- If you want all optional dependencies at once, use ``paramiko[all]``. +- For ``Match exec`` config support, use ``paramiko[invoke]`` (which installs + `Invoke `_). +- For GSS-API / SSPI support, use ``paramiko[gssapi]``, though also see + :ref:`the below subsection on it ` for details. +- ``paramiko[ed25519]`` references the dependencies for Ed25519 key support. + + - As of Paramiko 2.x this doesn't technically do anything, as those + dependencies are core installation requirements. + - However, you should use this for forwards compatibility; 3.0 will drop + those dependencies from core, leaving them purely optional. .. _release-lines: @@ -86,8 +102,14 @@ In general, you'll need one of the following setups: 1.4 and newer - which Paramiko will gladly install or upgrade, if you e.g. ``pip install -U`` - drop that support. +* Similarly, Cryptography 3.4 and above require Rust language tooling to + install from source; once again see Cryptography's documentation for details + here, such as `their Rust install section`_ and `this FAQ entry`_. + .. _installation instructions: -.. _Cryptography's install docs: https://cryptography.io/en/latest/installation/ +.. 
_Cryptography's install docs: https://cryptography.io/en/latest/installation.html +.. _their Rust install section: https://cryptography.io/en/latest/installation.html#rust +.. _this FAQ entry: https://cryptography.io/en/latest/faq.html#installing-cryptography-fails-with-error-can-not-find-rust-compiler .. _gssapi: @@ -95,19 +117,9 @@ In general, you'll need one of the following setups: Optional dependencies for GSS-API / SSPI / Kerberos =================================================== -In order to use GSS-API/Kerberos & related functionality, a couple of -additional dependencies are required (these are not listed in our ``setup.py`` -due to their infrequent utility & non-platform-agnostic requirements): - -* It hopefully goes without saying but **all platforms** need **a working - installation of GSS-API itself**, e.g. Heimdal. -* **Unix** needs `python-gssapi `_ - ``0.6.1`` or better. - - .. note:: This library appears to only function on Python 2.7 and up. - -* **Windows** needs `pywin32 `_ ``2.1.8`` - or better. +In order to use GSS-API/Kerberos & related functionality, additional +dependencies are required. It hopefully goes without saying but **all +platforms** need **a working installation of GSS-API itself**, e.g. Heimdal. .. note:: If you use Microsoft SSPI for kerberos authentication and credential @@ -115,8 +127,34 @@ due to their infrequent utility & non-platform-agnostic requirements): active directory configuration. For details see: http://technet.microsoft.com/en-us/library/cc738491%28v=ws.10%29.aspx +The ``gssapi`` "extra" install flavor +------------------------------------- -.. toctree:: - :hidden: +If you're installing via ``pip`` (recommended), you should be able to get the +optional Python package requirements by changing your installation to refer to +``paramiko[gssapi]`` (from simply ``paramiko``), e.g.:: - installing-1.x + pip install "paramiko[gssapi]" + +(Or update your ``requirements.txt``, or etc.) + + +.. TODO: just axe this once legacy gssapi support is gone, no point reiterating + +Manual dependency installation +------------------------------ + +If you're not using ``pip`` or your ``pip`` is too old to support the "extras" +functionality, the optional dependencies are as follows: + +* All platforms need `pyasn1 `_ ``0.1.7`` or + later. +* **Unix** needs: `gssapi `__ ``1.4.1`` or better. + + * An alternative is the `python-gssapi + `_ library (``0.6.1`` or above), + though it is no longer maintained upstream, and Paramiko's support for + its API may eventually become deprecated. + +* **Windows** needs `pywin32 `_ ``2.1.8`` + or better. 
diff --git a/tasks.py b/tasks.py index 90ee758c8..2e1052977 100644 --- a/tasks.py +++ b/tasks.py @@ -1,45 +1,161 @@ -from os import mkdir +import os from os.path import join from shutil import rmtree, copytree from invoke import Collection, task +from invocations.checks import blacken from invocations.docs import docs, www, sites -from invocations.packaging import publish +from invocations.packaging.release import ns as release_coll, publish +from invocations.testing import count_errors -# Until we move to spec-based testing +# TODO: this screams out for the invoke missing-feature of "I just wrap task X, +# assume its signature by default" (even if that is just **kwargs support) @task -def test(ctx, coverage=False, flags=""): - if "--verbose" not in flags.split(): - flags += " --verbose" - runner = "python" +def test( + ctx, + verbose=True, + color=True, + capture="sys", + module=None, + k=None, + x=False, + opts="", + coverage=False, + include_slow=False, + loop_on_fail=False, +): + """ + Run unit tests via pytest. + + By default, known-slow parts of the suite are SKIPPED unless + ``--include-slow`` is given. (Note that ``--include-slow`` does not mesh + well with explicit ``--opts="-m=xxx"`` - if ``-m`` is found in ``--opts``, + ``--include-slow`` will be ignored!) + """ + if verbose and "--verbose" not in opts and "-v" not in opts: + opts += " --verbose" + # TODO: forget why invocations.pytest added this; is it to force color when + # running headless? Probably? + if color: + opts += " --color=yes" + opts += " --capture={}".format(capture) + if "-m" not in opts and not include_slow: + opts += " -m 'not slow'" + if k is not None and not ("-k" in opts if opts else False): + opts += " -k {}".format(k) + if x and not ("-x" in opts if opts else False): + opts += " -x" + if loop_on_fail and not ("-f" in opts if opts else False): + opts += " -f" + modstr = "" + if module is not None: + # NOTE: implicit test_ prefix as we're not on pytest-relaxed yet + modstr = " tests/test_{}.py".format(module) + # Switch runner depending on coverage or no coverage. + # TODO: get pytest's coverage plugin working, IIRC it has issues? + runner = "pytest" if coverage: - runner = "coverage run --source=paramiko" - ctx.run("{0} test.py {1}".format(runner, flags), pty=True) + # Leverage how pytest can be run as 'python -m pytest', and then how + # coverage can be told to run things in that manner instead of + # expecting a literal .py file. + runner = "coverage run -m pytest" + # Strip SSH_AUTH_SOCK from parent env to avoid pollution by interactive + # users. + # TODO: once pytest coverage plugin works, see if there's a pytest-native + # way to handle the env stuff too, then we can remove these tasks entirely + # in favor of just "run pytest"? + env = dict(os.environ) + if "SSH_AUTH_SOCK" in env: + del env["SSH_AUTH_SOCK"] + cmd = "{} {} {}".format(runner, opts, modstr) + # NOTE: we have a pytest.ini and tend to use that over PYTEST_ADDOPTS. + ctx.run(cmd, pty=True, env=env, replace_env=True) @task -def coverage(ctx): - ctx.run("coverage run --source=paramiko test.py --verbose") +def coverage(ctx, opts="", codecov=False): + """ + Execute all tests (normal and slow) with coverage enabled. + """ + test(ctx, coverage=True, include_slow=True, opts=opts) + # Cribbed from invocations.pytest.coverage for now + if codecov: + ctx.run("coverage xml") + ctx.run("codecov") + + +@task +def guard(ctx, opts=""): + """ + Execute all tests and then watch for changes, re-running. 
+ """ + # TODO if coverage was run via pytest-cov, we could add coverage here too + return test(ctx, include_slow=True, loop_on_fail=True, opts=opts) # Until we stop bundling docs w/ releases. Need to discover use cases first. +# TODO: would be nice to tie this into our own version of build() too, but +# still have publish() use that build()...really need to try out classes! +# TODO 3.0: I'd like to just axe the 'built docs in sdist', none of my other +# projects do it. @task -def release(ctx, sdist=True, wheel=True, sign=True, dry_run=False): +def publish_( + ctx, sdist=True, wheel=True, sign=True, dry_run=False, index=None +): """ - Wraps invocations.packaging.release to add baked-in docs folder. + Wraps invocations.packaging.publish to add baked-in docs folder. """ # Build docs first. Use terribad workaround pending invoke #146 - ctx.run("inv docs") + ctx.run("inv docs", pty=True, hide=False) # Move the built docs into where Epydocs used to live - target = 'docs' + target = "docs" rmtree(target, ignore_errors=True) # TODO: make it easier to yank out this config val from the docs coll - copytree('sites/docs/_build', target) + copytree("sites/docs/_build", target) # Publish - publish(ctx, sdist=sdist, wheel=wheel, sign=sign, dry_run=dry_run) - # Remind - print("\n\nDon't forget to update RTD's versions page for new minor releases!") + publish( + ctx, sdist=sdist, wheel=wheel, sign=sign, dry_run=dry_run, index=index + ) + + +# Also have to hack up the newly enhanced all_() so it uses our publish +@task(name="all", default=True) +def all_(c, dry_run=False): + release_coll["prepare"](c, dry_run=dry_run) + publish_(c, dry_run=dry_run) + release_coll["push"](c, dry_run=dry_run) + release_coll["tidelift"](c, dry_run=dry_run) + +# TODO: "replace one task with another" needs a better public API, this is +# using unpublished internals & skips all the stuff add_task() does re: +# aliasing, defaults etc. +release_coll.tasks["publish"] = publish_ +release_coll.tasks["all"] = all_ -ns = Collection(test, coverage, release, docs, www, sites) +ns = Collection( + test, + coverage, + guard, + release_coll, + docs, + www, + sites, + count_errors, + blacken, +) +ns.configure( + { + "packaging": { + # NOTE: many of these are also set in kwarg defaults above; but + # having them here too means once we get rid of our custom + # release(), the behavior stays. + "sign": True, + "wheel": True, + "changelog_file": join( + www.configuration()["sphinx"]["source"], "changelog.rst" + ), + } + } +) diff --git a/test.py b/test.py deleted file mode 100755 index a1f13d859..000000000 --- a/test.py +++ /dev/null @@ -1,195 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2003-2007 Robey Pointer -# -# This file is part of paramiko. -# -# Paramiko is free software; you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation; either version 2.1 of the License, or (at your option) -# any later version. -# -# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with Paramiko; if not, write to the Free Software Foundation, Inc., -# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. - -""" -do the unit tests! 
-""" - -import os -import re -import sys -import unittest -from optparse import OptionParser -import paramiko -import threading -from paramiko.py3compat import PY2 - -sys.path.append('tests') - -from tests.test_message import MessageTest -from tests.test_file import BufferedFileTest -from tests.test_buffered_pipe import BufferedPipeTest -from tests.test_util import UtilTest -from tests.test_hostkeys import HostKeysTest -from tests.test_pkey import KeyTest -from tests.test_kex import KexTest -from tests.test_packetizer import PacketizerTest -from tests.test_auth import AuthTest -from tests.test_transport import TransportTest -from tests.test_ssh_exception import NoValidConnectionsErrorTest -from tests.test_client import SSHClientTest -from test_client import SSHClientTest # XXX why shadow the above import? -from test_gssapi import GSSAPITest -from test_ssh_gss import GSSAuthTest -from test_kex_gss import GSSKexTest - -default_host = 'localhost' -default_user = os.environ.get('USER', 'nobody') -default_keyfile = os.path.join(os.environ.get('HOME', '/'), '.ssh/id_rsa') -default_passwd = None - - -def iter_suite_tests(suite): - """Return all tests in a suite, recursing through nested suites""" - for item in suite._tests: - if isinstance(item, unittest.TestCase): - yield item - elif isinstance(item, unittest.TestSuite): - for r in iter_suite_tests(item): - yield r - else: - raise Exception('unknown object %r inside test suite %r' - % (item, suite)) - - -def filter_suite_by_re(suite, pattern): - result = unittest.TestSuite() - filter_re = re.compile(pattern) - for test in iter_suite_tests(suite): - if filter_re.search(test.id()): - result.addTest(test) - return result - - -def main(): - parser = OptionParser('usage: %prog [options]') - parser.add_option('--verbose', action='store_true', dest='verbose', default=False, - help='verbose display (one line per test)') - parser.add_option('--no-pkey', action='store_false', dest='use_pkey', default=True, - help='skip RSA/DSS private key tests (which can take a while)') - parser.add_option('--no-transport', action='store_false', dest='use_transport', default=True, - help='skip transport tests (which can take a while)') - parser.add_option('--no-sftp', action='store_false', dest='use_sftp', default=True, - help='skip SFTP client/server tests, which can be slow') - parser.add_option('--no-big-file', action='store_false', dest='use_big_file', default=True, - help='skip big file SFTP tests, which are slow as molasses') - parser.add_option('--gssapi-test', action='store_true', dest='gssapi_test', default=False, - help='Test the used APIs for GSS-API / SSPI authentication') - parser.add_option('--test-gssauth', action='store_true', dest='test_gssauth', default=False, - help='Test GSS-API / SSPI authentication for SSHv2. To test this, you need kerberos a infrastructure.\ - Note: Paramiko needs access to your krb5.keytab file. Make it readable for Paramiko or\ - copy the used key to another file and set the environment variable KRB5_KTNAME to this file.') - parser.add_option('--test-gssapi-keyex', action='store_true', dest='test_gsskex', default=False, - help='Test GSS-API / SSPI authenticated iffie-Hellman Key Exchange and user\ - authentication. To test this, you need kerberos a infrastructure.\ - Note: Paramiko needs access to your krb5.keytab file. 
Make it readable for Paramiko or\ - copy the used key to another file and set the environment variable KRB5_KTNAME to this file.') - parser.add_option('-R', action='store_false', dest='use_loopback_sftp', default=True, - help='perform SFTP tests against a remote server (by default, SFTP tests ' + - 'are done through a loopback socket)') - parser.add_option('-H', '--sftp-host', dest='hostname', type='string', default=default_host, - metavar='', - help='[with -R] host for remote sftp tests (default: %s)' % default_host) - parser.add_option('-U', '--sftp-user', dest='username', type='string', default=default_user, - metavar='', - help='[with -R] username for remote sftp tests (default: %s)' % default_user) - parser.add_option('-K', '--sftp-key', dest='keyfile', type='string', default=default_keyfile, - metavar='', - help='[with -R] location of private key for remote sftp tests (default: %s)' % - default_keyfile) - parser.add_option('-P', '--sftp-passwd', dest='password', type='string', default=default_passwd, - metavar='', - help='[with -R] (optional) password to unlock the private key for remote sftp tests') - parser.add_option('--krb5_principal', dest='krb5_principal', type='string', - metavar='', - help='The krb5 principal (your username) for GSS-API / SSPI authentication') - parser.add_option('--targ_name', dest='targ_name', type='string', - metavar='', - help='Target name for GSS-API / SSPI authentication.\ - This is the hosts name you are running the test on in the kerberos database.') - parser.add_option('--server_mode', action='store_true', dest='server_mode', default=False, - help='Usage with --gssapi-test. Test the available GSS-API / SSPI server mode to.\ - Note: you need to have access to the kerberos keytab file.') - - options, args = parser.parse_args() - - # setup logging - paramiko.util.log_to_file('test.log') - - if options.use_sftp: - from tests.test_sftp import SFTPTest - if options.use_loopback_sftp: - SFTPTest.init_loopback() - else: - SFTPTest.init(options.hostname, options.username, options.keyfile, options.password) - if not options.use_big_file: - SFTPTest.set_big_file_test(False) - if options.use_big_file: - from tests.test_sftp_big import BigSFTPTest - - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(MessageTest)) - suite.addTest(unittest.makeSuite(BufferedFileTest)) - suite.addTest(unittest.makeSuite(BufferedPipeTest)) - suite.addTest(unittest.makeSuite(UtilTest)) - suite.addTest(unittest.makeSuite(HostKeysTest)) - if options.use_pkey: - suite.addTest(unittest.makeSuite(KeyTest)) - suite.addTest(unittest.makeSuite(KexTest)) - suite.addTest(unittest.makeSuite(PacketizerTest)) - if options.use_transport: - suite.addTest(unittest.makeSuite(AuthTest)) - suite.addTest(unittest.makeSuite(TransportTest)) - suite.addTest(unittest.makeSuite(NoValidConnectionsErrorTest)) - suite.addTest(unittest.makeSuite(SSHClientTest)) - if options.use_sftp: - suite.addTest(unittest.makeSuite(SFTPTest)) - if options.use_big_file: - suite.addTest(unittest.makeSuite(BigSFTPTest)) - if options.gssapi_test: - GSSAPITest.init(options.targ_name, options.server_mode) - suite.addTest(unittest.makeSuite(GSSAPITest)) - if options.test_gssauth: - GSSAuthTest.init(options.krb5_principal, options.targ_name) - suite.addTest(unittest.makeSuite(GSSAuthTest)) - if options.test_gsskex: - GSSKexTest.init(options.krb5_principal, options.targ_name) - suite.addTest(unittest.makeSuite(GSSKexTest)) - verbosity = 1 - if options.verbose: - verbosity = 2 - - runner = 
unittest.TextTestRunner(verbosity=verbosity) - if len(args) > 0: - filter = '|'.join(args) - suite = filter_suite_by_re(suite, filter) - result = runner.run(suite) - # Clean up stale threads from poorly cleaned-up tests. - # TODO: make that not a problem, jeez - for thread in threading.enumerate(): - if thread is not threading.currentThread(): - thread.join(timeout=1) - # Exit correctly - if not result.wasSuccessful(): - sys.exit(1) - - -if __name__ == '__main__': - main() diff --git a/tests/__init__.py b/tests/__init__.py index e69de29bb..9850f3c1a 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,57 @@ +"""Base classes and helpers for testing paramiko.""" + +import functools +import locale +import os + +from pytest import skip + + +# List of locales which have non-ascii characters in all categories. +# Omits most European languages which for instance may have only some months +# with names that include accented characters. +_non_ascii_locales = [ + # East Asian locales + "ja_JP", + "ko_KR", + "zh_CN", + "zh_TW", + # European locales with non-latin alphabets + "el_GR", + "ru_RU", + "uk_UA", +] +# Also include UTF-8 versions of these locales +_non_ascii_locales.extend([name + ".utf8" for name in _non_ascii_locales]) + + +def requireNonAsciiLocale(category_name="LC_ALL"): + """Run decorated test under a non-ascii locale or skip if not possible.""" + if os.name != "posix": + return skip("Non-posix OSes don't really use C locales") + cat = getattr(locale, category_name) + return functools.partial(_decorate_with_locale, cat, _non_ascii_locales) + + +def _decorate_with_locale(category, try_locales, test_method): + """Decorate test_method to run after switching to a different locale.""" + + def _test_under_locale(testself, sftp): + original = locale.setlocale(category) + while try_locales: + try: + locale.setlocale(category, try_locales[0]) + except locale.Error: + # Mutating original list is ok, setlocale would keep failing + try_locales.pop(0) + else: + try: + return test_method(testself) + finally: + locale.setlocale(category, original) + skipTest = getattr(testself, "skipTest", None) + if skipTest is not None: + skipTest("No usable locales installed") + + functools.update_wrapper(_test_under_locale, test_method) + return _test_under_locale diff --git a/tests/badhash_key1.ed25519.key b/tests/badhash_key1.ed25519.key new file mode 100644 index 000000000..3e33781b3 --- /dev/null +++ b/tests/badhash_key1.ed25519.key @@ -0,0 +1,7 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZWQyNTUx +OQAAACCULQjdmVfwpbDAFYz4mhKo6aCiAfkbaC+dEdq5eP1R9QAAAIjXZhzv12Yc7wAAAAtzc2gt +ZWQyNTUxOQAAACCULQjdmVfwpbDAFYz4mhKo6aCiAfkbaC+dEdq5eP1R9QAAAEByeJbhZUBL2aJ6 +wP85amzQuqDJRrNyAGMtDBJ43SURxpQtCN2ZV/ClsMAVjPiaEqjpoKIB+RtoL50R2rl4/VH1AAAA +AAECAwQF +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/badhash_key2.ed25519.key b/tests/badhash_key2.ed25519.key new file mode 100644 index 000000000..bf48edac0 --- /dev/null +++ b/tests/badhash_key2.ed25519.key @@ -0,0 +1,7 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZWQyNTUx +OQAAACACJbNmFu2Bk34HArxhiRYajoIN03Vr0umfNvsc9atE0AAAAIi5+po1ufqaNQAAAAtzc2gt +ZWQyNTUxOQAAACACJbNmFu2Bk34HArxhiRYajoIN03Vr0umfNvsc9atE0AAAAECh/ZzZJDOZGnil +BxJMm+nOhBpc07IVBjU1ii+S8zqFaAIls2YW7YGTfgcCvGGJFhqOgg3TdWvS6Z82+xz1q0TQAAAA +AAECAwQF +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/cert_support/test_dss.key b/tests/cert_support/test_dss.key new file 
mode 100644 index 000000000..e10807f12 --- /dev/null +++ b/tests/cert_support/test_dss.key @@ -0,0 +1,12 @@ +-----BEGIN DSA PRIVATE KEY----- +MIIBuwIBAAKBgQDngaYDZ30c6/7cJgEEbtl8FgKdwhba1Z7oOrOn4MI/6C42G1bY +wMuqZf4dBCglsdq39SHrcjbE8Vq54gPSOh3g4+uV9Rcg5IOoPLbwp2jQfF6f1FIb +sx7hrDCIqUcQccPSxetPBKmXI9RN8rZLaFuQeTnI65BKM98Ruwvq6SI2LwIVAPDP +hSeawaJI27mKqOfe5PPBSmyHAoGBAJMXxXmPD9sGaQ419DIpmZecJKBUAy9uXD8x +gbgeDpwfDaFJP8owByCKREocPFfi86LjCuQkyUKOfjYMN6iHIf1oEZjB8uJAatUr +FzI0ArXtUqOhwTLwTyFuUojE5own2WYsOAGByvgfyWjsGhvckYNhI4ODpNdPlxQ8 +ZamaPGPsAoGARmR7CCPjodxASvRbIyzaVpZoJ/Z6x7dAumV+ysrV1BVYd0lYukmn +jO1kKBWApqpH1ve9XDQYN8zgxM4b16L21kpoWQnZtXrY3GZ4/it9kUgyB7+NwacI +BlXa8cMDL7Q/69o0d54U0X/NeX5QxuYR6OMJlrkQB7oiW/P/1mwjQgECFGI9QPSc +h9pT9XHqn+1rZ4bK+QGA +-----END DSA PRIVATE KEY----- diff --git a/tests/cert_support/test_dss.key-cert.pub b/tests/cert_support/test_dss.key-cert.pub new file mode 100644 index 000000000..07fd5578c --- /dev/null +++ b/tests/cert_support/test_dss.key-cert.pub @@ -0,0 +1 @@ +ssh-dss-cert-v01@openssh.com AAAAHHNzaC1kc3MtY2VydC12MDFAb3BlbnNzaC5jb20AAAAgJA3GjLmg6JbIWxokW/c827lmPOSvSfPDIY586yICFqIAAACBAOeBpgNnfRzr/twmAQRu2XwWAp3CFtrVnug6s6fgwj/oLjYbVtjAy6pl/h0EKCWx2rf1IetyNsTxWrniA9I6HeDj65X1FyDkg6g8tvCnaNB8Xp/UUhuzHuGsMIipRxBxw9LF608EqZcj1E3ytktoW5B5OcjrkEoz3xG7C+rpIjYvAAAAFQDwz4UnmsGiSNu5iqjn3uTzwUpshwAAAIEAkxfFeY8P2wZpDjX0MimZl5wkoFQDL25cPzGBuB4OnB8NoUk/yjAHIIpEShw8V+LzouMK5CTJQo5+Ngw3qIch/WgRmMHy4kBq1SsXMjQCte1So6HBMvBPIW5SiMTmjCfZZiw4AYHK+B/JaOwaG9yRg2Ejg4Ok10+XFDxlqZo8Y+wAAACARmR7CCPjodxASvRbIyzaVpZoJ/Z6x7dAumV+ysrV1BVYd0lYukmnjO1kKBWApqpH1ve9XDQYN8zgxM4b16L21kpoWQnZtXrY3GZ4/it9kUgyB7+NwacIBlXa8cMDL7Q/69o0d54U0X/NeX5QxuYR6OMJlrkQB7oiW/P/1mwjQgEAAAAAAAAAAAAAAAEAAAAJdXNlcl90ZXN0AAAACAAAAAR0ZXN0AAAAAAAAAAD//////////wAAAAAAAACCAAAAFXBlcm1pdC1YMTEtZm9yd2FyZGluZwAAAAAAAAAXcGVybWl0LWFnZW50LWZvcndhcmRpbmcAAAAAAAAAFnBlcm1pdC1wb3J0LWZvcndhcmRpbmcAAAAAAAAACnBlcm1pdC1wdHkAAAAAAAAADnBlcm1pdC11c2VyLXJjAAAAAAAAAAAAAAEXAAAAB3NzaC1yc2EAAAADAQABAAABAQDskr46Umjxh3wo7PoPQsSVS3xt6+5PhwmXrnVtBBnkOo+zHRwQo8G8sY+Lc6oOOzA5GCSawKOwqE305GIDfB8/L9EKOkAjdN18imDjw/YuJFA4bl9yFhsXrCb1GZPJw0pJ0H0Eid9EldyMQAhGE49MWvnFMQl1TgO6YWq/g71xAFimge0LvVWijlbMy7O+nsGxSpinIprV5S9Viv8XC/ku89tadZfca1uxq751aGfAWGeYrVytpUl8UO0ggqH6BaUvkDU7rWh2n5RHUTvgzceKWnz5wqd8BngK37WmJjAgCtHCJS5ZRf6oJGj2QVcqc6cjvEFWsCuOKB4KAjktauWxAAABDwAAAAdzc2gtcnNhAAABAK6jweL231fRhFoybEGTOXJfj0lx55KpDsw9Q1rBvZhrSgwUr2dFr9HVcKe44mTC7CMtdW5VcyB67l1fnMil/D/e4zYxI0PvbW6RxLFNqvvtxBu5sGt5B7uzV4aAV31TpWR0l5RwwpZqc0NUlTx7oMutN1BDrPqW70QZ/iTEwalkn5fo1JWej0cf4BdC9VgYDLnprx0KN3IToukbszRQySnuR6MQUfj0m7lUloJfF3rq8G0kNxWqDGoJilMhO5Lqu9wAhlZWdouypI6bViO6+ToCVixLNUYs3EfS1zCxvXpiyMvh6rZofJ6WqzUuSd4Mzb2Ka4ocTKi7kynF+OG0Ivo= tests/test_dss.key.pub diff --git a/tests/cert_support/test_ecdsa_256.key b/tests/cert_support/test_ecdsa_256.key new file mode 100644 index 000000000..42d447340 --- /dev/null +++ b/tests/cert_support/test_ecdsa_256.key @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIKB6ty3yVyKEnfF/zprx0qwC76MsMlHY4HXCnqho2eKioAoGCCqGSM49 +AwEHoUQDQgAElI9mbdlaS+T9nHxY/59lFnn80EEecZDBHq4gLpccY8Mge5ZTMiMD +ADRvOqQ5R98Sxst765CAqXmRtz8vwoD96g== +-----END EC PRIVATE KEY----- diff --git a/tests/cert_support/test_ecdsa_256.key-cert.pub b/tests/cert_support/test_ecdsa_256.key-cert.pub new file mode 100644 index 000000000..f2c93ccf3 --- /dev/null +++ b/tests/cert_support/test_ecdsa_256.key-cert.pub @@ -0,0 +1 @@ +ecdsa-sha2-nistp256-cert-v01@openssh.com 
AAAAKGVjZHNhLXNoYTItbmlzdHAyNTYtY2VydC12MDFAb3BlbnNzaC5jb20AAAAgJ+ZkRXedIWPl9y6fvel60p47ys5WgwMSjiwzJ2Ho+4MAAAAIbmlzdHAyNTYAAABBBJSPZm3ZWkvk/Zx8WP+fZRZ5/NBBHnGQwR6uIC6XHGPDIHuWUzIjAwA0bzqkOUffEsbLe+uQgKl5kbc/L8KA/eoAAAAAAAAAAAAAAAEAAAAJdXNlcl90ZXN0AAAACAAAAAR0ZXN0AAAAAAAAAAD//////////wAAAAAAAACCAAAAFXBlcm1pdC1YMTEtZm9yd2FyZGluZwAAAAAAAAAXcGVybWl0LWFnZW50LWZvcndhcmRpbmcAAAAAAAAAFnBlcm1pdC1wb3J0LWZvcndhcmRpbmcAAAAAAAAACnBlcm1pdC1wdHkAAAAAAAAADnBlcm1pdC11c2VyLXJjAAAAAAAAAAAAAAEXAAAAB3NzaC1yc2EAAAADAQABAAABAQDskr46Umjxh3wo7PoPQsSVS3xt6+5PhwmXrnVtBBnkOo+zHRwQo8G8sY+Lc6oOOzA5GCSawKOwqE305GIDfB8/L9EKOkAjdN18imDjw/YuJFA4bl9yFhsXrCb1GZPJw0pJ0H0Eid9EldyMQAhGE49MWvnFMQl1TgO6YWq/g71xAFimge0LvVWijlbMy7O+nsGxSpinIprV5S9Viv8XC/ku89tadZfca1uxq751aGfAWGeYrVytpUl8UO0ggqH6BaUvkDU7rWh2n5RHUTvgzceKWnz5wqd8BngK37WmJjAgCtHCJS5ZRf6oJGj2QVcqc6cjvEFWsCuOKB4KAjktauWxAAABDwAAAAdzc2gtcnNhAAABALdnEil8XIFkcgLZgYwS2cIQPHetUzMNxYCqzk7mSfVpCaIYNTr27RG+f+sD0cerdAIUUvhCT7iA82/Y7wzwkO2RUBi61ATfw9DDPPRQTDfix1SSRwbmPB/nVI1HlPMCEs6y48PFaBZqXwJPS3qycgSxoTBhaLCLzT+r6HRaibY7kiRLDeL3/WHyasK2PRdcYJ6KrLd0ctQcUHZCLK3fJfMfuQRg8MZLVrmK3fHStCXHpRFueRxUhZjaiS9evA/NtzEQhf46JDClQ2rLYpSqSg7QUR/rKwqWWyMuQkOHmlJw797VVa+ZzpUFXP7ekWel3FaBj8IHiimIA7Jm6dOCLm4= tests/test_ecdsa_256.key.pub diff --git a/tests/cert_support/test_ed25519.key b/tests/cert_support/test_ed25519.key new file mode 100644 index 000000000..eb9f94c20 --- /dev/null +++ b/tests/cert_support/test_ed25519.key @@ -0,0 +1,8 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW +QyNTUxOQAAACB69SvZKJh/9VgSL0G27b5xVYa8nethH3IERbi0YqJDXwAAAKhjwAdrY8AH +awAAAAtzc2gtZWQyNTUxOQAAACB69SvZKJh/9VgSL0G27b5xVYa8nethH3IERbi0YqJDXw +AAAEA9tGQi2IrprbOSbDCF+RmAHd6meNSXBUQ2ekKXm4/8xnr1K9komH/1WBIvQbbtvnFV +hryd62EfcgRFuLRiokNfAAAAI2FsZXhfZ2F5bm9yQEFsZXhzLU1hY0Jvb2stQWlyLmxvY2 +FsAQI= +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/cert_support/test_ed25519.key-cert.pub b/tests/cert_support/test_ed25519.key-cert.pub new file mode 100644 index 000000000..4e01415a8 --- /dev/null +++ b/tests/cert_support/test_ed25519.key-cert.pub @@ -0,0 +1 @@ +ssh-ed25519-cert-v01@openssh.com AAAAIHNzaC1lZDI1NTE5LWNlcnQtdjAxQG9wZW5zc2guY29tAAAAIIjBkc8l1X887CLBHraU+d6/74Hxr9oa+3HC0iioecZ6AAAAIHr1K9komH/1WBIvQbbtvnFVhryd62EfcgRFuLRiokNfAAAAAAAAAAAAAAABAAAACXVzZXJfdGVzdAAAAAgAAAAEdGVzdAAAAAAAAAAA//////////8AAAAAAAAAggAAABVwZXJtaXQtWDExLWZvcndhcmRpbmcAAAAAAAAAF3Blcm1pdC1hZ2VudC1mb3J3YXJkaW5nAAAAAAAAABZwZXJtaXQtcG9ydC1mb3J3YXJkaW5nAAAAAAAAAApwZXJtaXQtcHR5AAAAAAAAAA5wZXJtaXQtdXNlci1yYwAAAAAAAAAAAAABFwAAAAdzc2gtcnNhAAAAAwEAAQAAAQEA7JK+OlJo8Yd8KOz6D0LElUt8bevuT4cJl651bQQZ5DqPsx0cEKPBvLGPi3OqDjswORgkmsCjsKhN9ORiA3wfPy/RCjpAI3TdfIpg48P2LiRQOG5fchYbF6wm9RmTycNKSdB9BInfRJXcjEAIRhOPTFr5xTEJdU4DumFqv4O9cQBYpoHtC71Voo5WzMuzvp7BsUqYpyKa1eUvVYr/Fwv5LvPbWnWX3Gtbsau+dWhnwFhnmK1craVJfFDtIIKh+gWlL5A1O61odp+UR1E74M3Hilp8+cKnfAZ4Ct+1piYwIArRwiUuWUX+qCRo9kFXKnOnI7xBVrArjigeCgI5LWrlsQAAAQ8AAAAHc3NoLXJzYQAAAQCNfYITv/GCW42fLI89x0pKpXIET/xHIBVan5S3fy5SZq9gLG1Db9g/FITDfOVA7OX8mU/91rucHGtuEi3isILdNFrCcoLEml289tyyluUbbFD5fjvBchMWBkYPwrOPfEzSs299Yk8ZgfV1pjWlndfV54s4c9pinkGu8c0Vzc6stEbWkdmoOHE8su3ogUPg/hOygDzJ+ZOgP5HIUJ6YgkgVpWgZm7zofwdZfa2HEb+WhZaKfMK1UCw1UiSBVk9dx6qzF9m243tHwSHraXvb9oJ1wT1S/MypTbP4RT4fHN8koYNrv2szEBN+lkRgk1D7xaxS/Md2TJsau9ho/UCXSR8L tests/test_ed25519.key.pub diff --git a/tests/cert_support/test_rsa.key b/tests/cert_support/test_rsa.key new file mode 100644 index 000000000..f50e9c538 --- /dev/null +++ b/tests/cert_support/test_rsa.key @@ -0,0 +1,15 @@ +-----BEGIN RSA 
PRIVATE KEY----- +MIICWgIBAAKBgQDTj1bqB4WmayWNPB+8jVSYpZYk80Ujvj680pOTh2bORBjbIAyz +oWGW+GUjzKxTiiPvVmxFgx5wdsFvF03v34lEVVhMpouqPAYQ15N37K/ir5XY+9m/ +d8ufMCkjeXsQkKqFbAlQcnWMCRnOoPHS3I4vi6hmnDDeeYTSRvfLbW0fhwIBIwKB +gBIiOqZYaoqbeD9OS9z2K9KR2atlTxGxOJPXiP4ESqP3NVScWNwyZ3NXHpyrJLa0 +EbVtzsQhLn6rF+TzXnOlcipFvjsem3iYzCpuChfGQ6SovTcOjHV9z+hnpXvQ/fon +soVRZY65wKnF7IAoUwTmJS9opqgrN6kRgCd3DASAMd1bAkEA96SBVWFt/fJBNJ9H +tYnBKZGw0VeHOYmVYbvMSstssn8un+pQpUm9vlG/bp7Oxd/m+b9KWEh2xPfv6zqU +avNwHwJBANqzGZa/EpzF4J8pGti7oIAPUIDGMtfIcmqNXVMckrmzQ2vTfqtkEZsA +4rE1IERRyiJQx6EJsz21wJmGV9WJQ5kCQQDwkS0uXqVdFzgHO6S++tjmjYcxwr3g +H0CoFYSgbddOT6miqRskOQF3DZVkJT3kyuBgU2zKygz52ukQZMqxCb1fAkASvuTv +qfpH87Qq5kQhNKdbbwbmd2NxlNabazPijWuphGTdW0VfJdWfklyS2Kr+iqrs/5wV +HhathJt636Eg7oIjAkA8ht3MQ+XSl9yIJIS8gVpbPxSw5OMfw0PjVE7tBdQruiSc +nvuQES5C9BMHjF39LZiGH1iLQy7FgdHyoP+eodI7 +-----END RSA PRIVATE KEY----- diff --git a/tests/cert_support/test_rsa.key-cert.pub b/tests/cert_support/test_rsa.key-cert.pub new file mode 100644 index 000000000..7487ab664 --- /dev/null +++ b/tests/cert_support/test_rsa.key-cert.pub @@ -0,0 +1 @@ +ssh-rsa-cert-v01@openssh.com AAAAHHNzaC1yc2EtY2VydC12MDFAb3BlbnNzaC5jb20AAAAgsZlXTd5NE4uzGAn6TyAqQj+IPbsTEFGap2x5pTRwQR8AAAABIwAAAIEA049W6geFpmsljTwfvI1UmKWWJPNFI74+vNKTk4dmzkQY2yAMs6FhlvhlI8ysU4oj71ZsRYMecHbBbxdN79+JRFVYTKaLqjwGENeTd+yv4q+V2PvZv3fLnzApI3l7EJCqhWwJUHJ1jAkZzqDx0tyOL4uoZpww3nmE0kb3y21tH4cAAAAAAAAE0gAAAAEAAAAmU2FtcGxlIHNlbGYtc2lnbmVkIE9wZW5TU0ggY2VydGlmaWNhdGUAAAASAAAABXVzZXIxAAAABXVzZXIyAAAAAAAAAAD//////////wAAAAAAAACCAAAAFXBlcm1pdC1YMTEtZm9yd2FyZGluZwAAAAAAAAAXcGVybWl0LWFnZW50LWZvcndhcmRpbmcAAAAAAAAAFnBlcm1pdC1wb3J0LWZvcndhcmRpbmcAAAAAAAAACnBlcm1pdC1wdHkAAAAAAAAADnBlcm1pdC11c2VyLXJjAAAAAAAAAAAAAACVAAAAB3NzaC1yc2EAAAABIwAAAIEA049W6geFpmsljTwfvI1UmKWWJPNFI74+vNKTk4dmzkQY2yAMs6FhlvhlI8ysU4oj71ZsRYMecHbBbxdN79+JRFVYTKaLqjwGENeTd+yv4q+V2PvZv3fLnzApI3l7EJCqhWwJUHJ1jAkZzqDx0tyOL4uoZpww3nmE0kb3y21tH4cAAACPAAAAB3NzaC1yc2EAAACATFHFsARDgQevc6YLxNnDNjsFtZ08KPMyYVx0w5xm95IVZHVWSOc5w+ccjqN9HRwxV3kP7IvL91qx0Uc3MJdB9g/O6HkAP+rpxTVoTb2EAMekwp5+i8nQJW4CN2BSsbQY1M6r7OBZ5nmF4hOW/5Pu4l22lXe2ydy8kEXOEuRpUeQ= test_rsa.key.pub diff --git a/tests/configs/basic b/tests/configs/basic new file mode 100644 index 000000000..93fe3beba --- /dev/null +++ b/tests/configs/basic @@ -0,0 +1,4 @@ +CanonicalDomains paramiko.org + +Host www.paramiko.org + User rando diff --git a/tests/configs/canon b/tests/configs/canon new file mode 100644 index 000000000..7b979408f --- /dev/null +++ b/tests/configs/canon @@ -0,0 +1,8 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org + +IdentityFile base.key + +Host www.paramiko.org + User rando + IdentityFile canonicalized.key diff --git a/tests/configs/canon-always b/tests/configs/canon-always new file mode 100644 index 000000000..f3f56b707 --- /dev/null +++ b/tests/configs/canon-always @@ -0,0 +1,5 @@ +CanonicalDomains paramiko.org +CanonicalizeHostname always + +Host www.paramiko.org + User rando diff --git a/tests/configs/canon-ipv4 b/tests/configs/canon-ipv4 new file mode 100644 index 000000000..92c3875f6 --- /dev/null +++ b/tests/configs/canon-ipv4 @@ -0,0 +1,6 @@ +CanonicalDomains paramiko.org +CanonicalizeHostname yes +AddressFamily inet + +Host www.paramiko.org + User rando diff --git a/tests/configs/canon-local b/tests/configs/canon-local new file mode 100644 index 000000000..dde9f77bc --- /dev/null +++ b/tests/configs/canon-local @@ -0,0 +1,6 @@ +Host www.paramiko.org + User rando + +Host www + CanonicalDomains paramiko.org + CanonicalizeHostname yes diff --git 
a/tests/configs/canon-local-always b/tests/configs/canon-local-always new file mode 100644 index 000000000..0ad0535a3 --- /dev/null +++ b/tests/configs/canon-local-always @@ -0,0 +1,6 @@ +Host www.paramiko.org + User rando + +Host www + CanonicalDomains paramiko.org + CanonicalizeHostname always diff --git a/tests/configs/deep-canon b/tests/configs/deep-canon new file mode 100644 index 000000000..483823d5e --- /dev/null +++ b/tests/configs/deep-canon @@ -0,0 +1,11 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org + +Host www.paramiko.org + User rando + +Host sub.www.paramiko.org + User deep + +Host subber.sub.www.paramiko.org + User deeper diff --git a/tests/configs/deep-canon-maxdots b/tests/configs/deep-canon-maxdots new file mode 100644 index 000000000..7785f6604 --- /dev/null +++ b/tests/configs/deep-canon-maxdots @@ -0,0 +1,12 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org +CanonicalizeMaxDots 2 + +Host www.paramiko.org + User rando + +Host sub.www.paramiko.org + User deep + +Host subber.sub.www.paramiko.org + User deeper diff --git a/tests/configs/empty-canon b/tests/configs/empty-canon new file mode 100644 index 000000000..19743ad6f --- /dev/null +++ b/tests/configs/empty-canon @@ -0,0 +1,6 @@ +CanonicalizeHostname yes +CanonicalDomains +AddressFamily inet + +Host www.paramiko.org + User rando diff --git a/tests/configs/fallback-no b/tests/configs/fallback-no new file mode 100644 index 000000000..ec8d13ee3 --- /dev/null +++ b/tests/configs/fallback-no @@ -0,0 +1,6 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org +CanonicalizeFallbackLocal no + +Host www.paramiko.org + User rando diff --git a/tests/configs/fallback-yes b/tests/configs/fallback-yes new file mode 100644 index 000000000..bc4f4eee6 --- /dev/null +++ b/tests/configs/fallback-yes @@ -0,0 +1,6 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org +CanonicalizeFallbackLocal yes + +Host www.paramiko.org + User rando diff --git a/tests/configs/hostname-exec-tokenized b/tests/configs/hostname-exec-tokenized new file mode 100644 index 000000000..1cae2c03b --- /dev/null +++ b/tests/configs/hostname-exec-tokenized @@ -0,0 +1,2 @@ +Match exec "ping %h" + HostName pingable.%h diff --git a/tests/configs/hostname-tokenized b/tests/configs/hostname-tokenized new file mode 100644 index 000000000..1905c0cc5 --- /dev/null +++ b/tests/configs/hostname-tokenized @@ -0,0 +1 @@ +HostName prefix.%h diff --git a/tests/configs/invalid b/tests/configs/invalid new file mode 100644 index 000000000..81332fe84 --- /dev/null +++ b/tests/configs/invalid @@ -0,0 +1 @@ +lolwut diff --git a/tests/configs/match-all b/tests/configs/match-all new file mode 100644 index 000000000..7673e0a09 --- /dev/null +++ b/tests/configs/match-all @@ -0,0 +1,2 @@ +Match all + User awesome diff --git a/tests/configs/match-all-after-canonical b/tests/configs/match-all-after-canonical new file mode 100644 index 000000000..531112cb3 --- /dev/null +++ b/tests/configs/match-all-after-canonical @@ -0,0 +1,5 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org + +Match canonical all + User awesome diff --git a/tests/configs/match-all-and-more b/tests/configs/match-all-and-more new file mode 100644 index 000000000..bb50696ee --- /dev/null +++ b/tests/configs/match-all-and-more @@ -0,0 +1,2 @@ +Match all exec "lol nope" + HostName whatever diff --git a/tests/configs/match-all-and-more-before b/tests/configs/match-all-and-more-before new file mode 100644 index 000000000..4d5b2e346 --- /dev/null +++ 
b/tests/configs/match-all-and-more-before @@ -0,0 +1,2 @@ +Match exec "lol nope" all + HostName whatever diff --git a/tests/configs/match-all-before-canonical b/tests/configs/match-all-before-canonical new file mode 100644 index 000000000..35e3b0e26 --- /dev/null +++ b/tests/configs/match-all-before-canonical @@ -0,0 +1,5 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org + +Match all canonical + User oops diff --git a/tests/configs/match-canonical-no b/tests/configs/match-canonical-no new file mode 100644 index 000000000..e528dc64d --- /dev/null +++ b/tests/configs/match-canonical-no @@ -0,0 +1,7 @@ +CanonicalizeHostname no + +Match canonical all + User awesome + +Match !canonical host specific + User overload diff --git a/tests/configs/match-canonical-yes b/tests/configs/match-canonical-yes new file mode 100644 index 000000000..d6c20928d --- /dev/null +++ b/tests/configs/match-canonical-yes @@ -0,0 +1,5 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org + +Match !canonical host www* + User hidden diff --git a/tests/configs/match-complex b/tests/configs/match-complex new file mode 100644 index 000000000..63634039c --- /dev/null +++ b/tests/configs/match-complex @@ -0,0 +1,17 @@ +HostName bogus + +Match originalhost target host bogus + User rand + +Match originalhost remote localuser rando + User calrissian + +# Just to set user for subsequent match +Match originalhost www + User calrissian + +Match !canonical originalhost www host bogus localuser rando user calrissian + Port 7777 + +Match !canonical !originalhost www host bogus localuser rando !user calrissian + Port 1234 diff --git a/tests/configs/match-exec b/tests/configs/match-exec new file mode 100644 index 000000000..62a147aa2 --- /dev/null +++ b/tests/configs/match-exec @@ -0,0 +1,16 @@ +Match exec "quoted" + User benjamin + +Match exec unquoted + User rando + +Match exec "quoted spaced" + User neil + +# Just to prepopulate values for tokenizing subsequent exec +Host target + User intermediate + HostName configured + +Match exec "%C %d %h %L %l %n %p %r %u" + Port 1337 diff --git a/tests/configs/match-exec-canonical b/tests/configs/match-exec-canonical new file mode 100644 index 000000000..794ee9d55 --- /dev/null +++ b/tests/configs/match-exec-canonical @@ -0,0 +1,10 @@ +CanonicalDomains paramiko.org +CanonicalizeHostname always + +# This will match in the first, uncanonicalized pass +Match !canonical exec uncanonicalized + User defenseless + +# And this will match the second time +Match canonical exec canonicalized + Port 8007 diff --git a/tests/configs/match-exec-negation b/tests/configs/match-exec-negation new file mode 100644 index 000000000..937c910e8 --- /dev/null +++ b/tests/configs/match-exec-negation @@ -0,0 +1,5 @@ +Match !exec "this succeeds" + User nope + +Match !exec "this fails" + User yup diff --git a/tests/configs/match-exec-no-arg b/tests/configs/match-exec-no-arg new file mode 100644 index 000000000..20c16d161 --- /dev/null +++ b/tests/configs/match-exec-no-arg @@ -0,0 +1,2 @@ +Match exec + User uh-oh diff --git a/tests/configs/match-host b/tests/configs/match-host new file mode 100644 index 000000000..86cbff5d8 --- /dev/null +++ b/tests/configs/match-host @@ -0,0 +1,2 @@ +Match host target + User rand diff --git a/tests/configs/match-host-canonicalized b/tests/configs/match-host-canonicalized new file mode 100644 index 000000000..52dadeae9 --- /dev/null +++ b/tests/configs/match-host-canonicalized @@ -0,0 +1,8 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org + +Match host 
www.paramiko.org + User rand + +Match canonical host docs.paramiko.org + User eric diff --git a/tests/configs/match-host-from-match b/tests/configs/match-host-from-match new file mode 100644 index 000000000..172ee116d --- /dev/null +++ b/tests/configs/match-host-from-match @@ -0,0 +1,5 @@ +Match host original-host + HostName substituted-host + +Match host substituted-host + User inner diff --git a/tests/configs/match-host-glob b/tests/configs/match-host-glob new file mode 100644 index 000000000..3d53cf48a --- /dev/null +++ b/tests/configs/match-host-glob @@ -0,0 +1,2 @@ +Match host *ever + User matrim diff --git a/tests/configs/match-host-glob-list b/tests/configs/match-host-glob-list new file mode 100644 index 000000000..3617d1369 --- /dev/null +++ b/tests/configs/match-host-glob-list @@ -0,0 +1,8 @@ +Match host *ever + User matrim + +Match host somehost,someotherhost + User thom + +Match host goo*,!goof + User perrin diff --git a/tests/configs/match-host-name b/tests/configs/match-host-name new file mode 100644 index 000000000..783d939eb --- /dev/null +++ b/tests/configs/match-host-name @@ -0,0 +1,4 @@ +HostName default-host + +Match host default-host + User silly diff --git a/tests/configs/match-host-negated b/tests/configs/match-host-negated new file mode 100644 index 000000000..7c5d3f3ee --- /dev/null +++ b/tests/configs/match-host-negated @@ -0,0 +1,2 @@ +Match !host www + User jeff diff --git a/tests/configs/match-host-no-arg b/tests/configs/match-host-no-arg new file mode 100644 index 000000000..191cebb56 --- /dev/null +++ b/tests/configs/match-host-no-arg @@ -0,0 +1,2 @@ +Match host + User oops diff --git a/tests/configs/match-localuser b/tests/configs/match-localuser new file mode 100644 index 000000000..fe4a276c9 --- /dev/null +++ b/tests/configs/match-localuser @@ -0,0 +1,14 @@ +Match localuser gandalf + HostName gondor + +Match localuser b* + HostName shire + +Match localuser aragorn,frodo + HostName moria + +Match localuser gimli,!legolas + Port 7373 + +Match !localuser sauron + HostName mordor diff --git a/tests/configs/match-localuser-no-arg b/tests/configs/match-localuser-no-arg new file mode 100644 index 000000000..6623553ac --- /dev/null +++ b/tests/configs/match-localuser-no-arg @@ -0,0 +1,2 @@ +Match localuser + User oops diff --git a/tests/configs/match-orighost b/tests/configs/match-orighost new file mode 100644 index 000000000..10541993a --- /dev/null +++ b/tests/configs/match-orighost @@ -0,0 +1,16 @@ +HostName bogus + +Match originalhost target + User tuon + +Match originalhost what* + User matrim + +Match originalhost comma,sep* + User chameleon + +Match originalhost yep,!nope + User skipped + +Match !originalhost www !originalhost nope + User thom diff --git a/tests/configs/match-orighost-canonical b/tests/configs/match-orighost-canonical new file mode 100644 index 000000000..737345e8a --- /dev/null +++ b/tests/configs/match-orighost-canonical @@ -0,0 +1,5 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org + +Match originalhost www + User tuon diff --git a/tests/configs/match-orighost-no-arg b/tests/configs/match-orighost-no-arg new file mode 100644 index 000000000..427382ba0 --- /dev/null +++ b/tests/configs/match-orighost-no-arg @@ -0,0 +1,2 @@ +Match originalhost + User oops diff --git a/tests/configs/match-user b/tests/configs/match-user new file mode 100644 index 000000000..14d6ac12b --- /dev/null +++ b/tests/configs/match-user @@ -0,0 +1,14 @@ +Match user gandalf + HostName gondor + +Match user b* + HostName shire + +Match user aragorn,frodo + 
HostName moria + +Match user gimli,!legolas + Port 7373 + +Match !user sauron + HostName mordor diff --git a/tests/configs/match-user-explicit b/tests/configs/match-user-explicit new file mode 100644 index 000000000..9a2b1d82f --- /dev/null +++ b/tests/configs/match-user-explicit @@ -0,0 +1,4 @@ +User explicit + +Match user explicit + HostName dumb diff --git a/tests/configs/match-user-no-arg b/tests/configs/match-user-no-arg new file mode 100644 index 000000000..65a11ab4f --- /dev/null +++ b/tests/configs/match-user-no-arg @@ -0,0 +1,2 @@ +Match user + User oops diff --git a/tests/configs/multi-canon-domains b/tests/configs/multi-canon-domains new file mode 100644 index 000000000..5674b4423 --- /dev/null +++ b/tests/configs/multi-canon-domains @@ -0,0 +1,5 @@ +CanonicalizeHostname yes +CanonicalDomains not-a-real-tld paramiko.org + +Host www.paramiko.org + User rando diff --git a/tests/configs/no-canon b/tests/configs/no-canon new file mode 100644 index 000000000..033f8c536 --- /dev/null +++ b/tests/configs/no-canon @@ -0,0 +1,5 @@ +CanonicalizeHostname no +CanonicalDomains paramiko.org + +Host www.paramiko.org + User rando diff --git a/tests/configs/robey b/tests/configs/robey new file mode 100644 index 000000000..b20262249 --- /dev/null +++ b/tests/configs/robey @@ -0,0 +1,17 @@ +# A timeless classic? +# NOTE: some lines in here have 'extra' whitespace (incl trailing, and mixed +# tabs/spaces!) on purpose. + +Host * + User robey + IdentityFile =~/.ssh/id_rsa + +# comment +Host *.example.com + User bjork +Port=3333 +Host * + Crazy something dumb +Host spoo.example.com +Crazy something else + diff --git a/tests/configs/zero-maxdots b/tests/configs/zero-maxdots new file mode 100644 index 000000000..dc00054c2 --- /dev/null +++ b/tests/configs/zero-maxdots @@ -0,0 +1,9 @@ +CanonicalizeHostname yes +CanonicalDomains paramiko.org +CanonicalizeMaxDots 0 + +Host www.paramiko.org + User rando + +Host sub.www.paramiko.org + User deep diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 000000000..2b509c5c0 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,103 @@ +import logging +import os +import shutil +import threading + +import pytest +from paramiko import RSAKey, SFTPServer, SFTP, Transport + +from .loop import LoopSocket +from .stub_sftp import StubServer, StubSFTPServer +from .util import _support + + +# TODO: not a huge fan of conftest.py files, see if we can move these somewhere +# 'nicer'. + + +# Perform logging by default; pytest will capture and thus hide it normally, +# presenting it on error/failure. (But also allow turning it off when doing +# very pinpoint debugging - e.g. using breakpoints, so you don't want output +# hiding enabled, but also don't want all the logging to gum up the terminal.) +if not os.environ.get("DISABLE_LOGGING", False): + logging.basicConfig( + level=logging.DEBUG, + # Also make sure to set up timestamping for more sanity when debugging. + format="[%(relativeCreated)s]\t%(levelname)s:%(name)s:%(message)s", + datefmt="%H:%M:%S", + ) + + +def make_sftp_folder(): + """ + Ensure expected target temp folder exists on the remote end. + + Will clean it out if it already exists. + """ + # TODO: go back to using the sftp functionality itself for folder setup so + # we can test against live SFTP servers again someday. (Not clear if anyone + # is/was using the old capability for such, though...) + # TODO: something that would play nicer with concurrent testing (but + # probably e.g. 
using thread ID or UUIDs or something; not the "count up + # until you find one not used!" crap from before...) + # TODO: if we want to lock ourselves even harder into localhost-only + # testing (probably not?) could use tempdir modules for this for improved + # safety. Then again...why would someone have such a folder??? + path = os.environ.get("TEST_FOLDER", "paramiko-test-target") + # Forcibly nuke this directory locally, since at the moment, the below + # fixtures only ever run with a locally scoped stub test server. + shutil.rmtree(path, ignore_errors=True) + # Then create it anew, again locally, for the same reason. + os.mkdir(path) + return path + + +@pytest.fixture # (scope='session') +def sftp_server(): + """ + Set up an in-memory SFTP server thread. Yields the client Transport/socket. + + The resulting client Transport (along with all the server components) will + be the same object throughout the test session; the `sftp` fixture then + creates new higher level client objects wrapped around the client + Transport, as necessary. + """ + # Sockets & transports + socks = LoopSocket() + sockc = LoopSocket() + sockc.link(socks) + tc = Transport(sockc) + ts = Transport(socks) + # Auth + host_key = RSAKey.from_private_key_file(_support("test_rsa.key")) + ts.add_server_key(host_key) + # Server setup + event = threading.Event() + server = StubServer() + ts.set_subsystem_handler("sftp", SFTPServer, StubSFTPServer) + ts.start_server(event, server) + # Wait (so client has time to connect? Not sure. Old.) + event.wait(1.0) + # Make & yield connection. + tc.connect(username="slowdive", password="pygmalion") + yield tc + # TODO: any need for shutdown? Why didn't old suite do so? Or was that the + # point of the "join all threads from threading module" crap in test.py? + + +@pytest.fixture +def sftp(sftp_server): + """ + Yield an SFTP client connected to the global in-session SFTP server thread. + """ + # Client setup + client = SFTP.from_transport(sftp_server) + # Work in 'remote' folder setup (as it wants to use the client) + # TODO: how cleanest to make this available to tests? Doing it this way is + # marginally less bad than the previous 'global'-using setup, but not by + # much? + client.FOLDER = make_sftp_folder() + # Yield client to caller + yield client + # Clean up - as in make_sftp_folder, we assume local-only exec for now. + shutil.rmtree(client.FOLDER, ignore_errors=True) diff --git a/tests/loop.py b/tests/loop.py index e805ad965..dd1f5a0c8 100644 --- a/tests/loop.py +++ b/tests/loop.py @@ -16,21 +16,19 @@ # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. -""" -... -""" +import socket +import threading -import threading, socket from paramiko.common import asbytes -class LoopSocket (object): +class LoopSocket(object): """ A LoopSocket looks like a normal socket, but all data written to it is delivered on the read-end of another LoopSocket, and vice versa. It's like a software "socketpair". 
""" - + def __init__(self): self.__in_buffer = bytes() self.__lock = threading.Lock() @@ -86,7 +84,7 @@ def __feed(self, data): self.__cv.notifyAll() finally: self.__lock.release() - + def __unlink(self): m = None self.__lock.acquire() @@ -98,5 +96,3 @@ def __unlink(self): self.__lock.release() if m is not None: m.__unlink() - - diff --git a/tests/stub_sftp.py b/tests/stub_sftp.py index 24380ba1a..1528a0b88 100644 --- a/tests/stub_sftp.py +++ b/tests/stub_sftp.py @@ -21,13 +21,22 @@ """ import os -import sys -from paramiko import ServerInterface, SFTPServerInterface, SFTPServer, SFTPAttributes, \ - SFTPHandle, SFTP_OK, AUTH_SUCCESSFUL, OPEN_SUCCEEDED + +from paramiko import ( + AUTH_SUCCESSFUL, + OPEN_SUCCEEDED, + SFTPAttributes, + SFTPHandle, + SFTPServer, + SFTPServerInterface, + SFTP_FAILURE, + SFTP_OK, + ServerInterface, +) from paramiko.common import o666 -class StubServer (ServerInterface): +class StubServer(ServerInterface): def check_auth_password(self, username, password): # all are allowed return AUTH_SUCCESSFUL @@ -36,7 +45,7 @@ def check_channel_request(self, kind, chanid): return OPEN_SUCCEEDED -class StubSFTPHandle (SFTPHandle): +class StubSFTPHandle(SFTPHandle): def stat(self): try: return SFTPAttributes.from_stat(os.fstat(self.readfile.fileno())) @@ -53,11 +62,12 @@ def chattr(self, attr): return SFTPServer.convert_errno(e.errno) -class StubSFTPServer (SFTPServerInterface): +class StubSFTPServer(SFTPServerInterface): # assume current folder is a fine root - # (the tests always create and eventualy delete a subfolder, so there shouldn't be any mess) + # (the tests always create and eventually delete a subfolder, so there + # shouldn't be any mess) ROOT = os.getcwd() - + def _realpath(self, path): return self.ROOT + self.canonicalize(path) @@ -67,7 +77,9 @@ def list_folder(self, path): out = [] flist = os.listdir(path) for fname in flist: - attr = SFTPAttributes.from_stat(os.stat(os.path.join(path, fname))) + attr = SFTPAttributes.from_stat( + os.stat(os.path.join(path, fname)) + ) attr.filename = fname out.append(attr) return out @@ -91,9 +103,9 @@ def lstat(self, path): def open(self, path, flags, attr): path = self._realpath(path) try: - binary_flag = getattr(os, 'O_BINARY', 0) + binary_flag = getattr(os, "O_BINARY", 0) flags |= binary_flag - mode = getattr(attr, 'st_mode', None) + mode = getattr(attr, "st_mode", None) if mode is not None: fd = os.open(path, flags, mode) else: @@ -107,17 +119,17 @@ def open(self, path, flags, attr): SFTPServer.set_file_attr(path, attr) if flags & os.O_WRONLY: if flags & os.O_APPEND: - fstr = 'ab' + fstr = "ab" else: - fstr = 'wb' + fstr = "wb" elif flags & os.O_RDWR: if flags & os.O_APPEND: - fstr = 'a+b' + fstr = "a+b" else: - fstr = 'r+b' + fstr = "r+b" else: # O_RDONLY (== 0) - fstr = 'rb' + fstr = "rb" try: f = os.fdopen(fd, fstr) except OSError as e: @@ -137,6 +149,17 @@ def remove(self, path): return SFTP_OK def rename(self, oldpath, newpath): + oldpath = self._realpath(oldpath) + newpath = self._realpath(newpath) + if os.path.exists(newpath): + return SFTP_FAILURE + try: + os.rename(oldpath, newpath) + except OSError as e: + return SFTPServer.convert_errno(e.errno) + return SFTP_OK + + def posix_rename(self, oldpath, newpath): oldpath = self._realpath(oldpath) newpath = self._realpath(newpath) try: @@ -173,18 +196,19 @@ def chattr(self, path, attr): def symlink(self, target_path, path): path = self._realpath(path) - if (len(target_path) > 0) and (target_path[0] == '/'): + if (len(target_path) > 0) and (target_path[0] == "/"): # 
absolute symlink target_path = os.path.join(self.ROOT, target_path[1:]) - if target_path[:2] == '//': + if target_path[:2] == "//": # bug in os.path.join target_path = target_path[1:] else: # compute relative to path abspath = os.path.join(os.path.dirname(path), target_path) - if abspath[:len(self.ROOT)] != self.ROOT: - # this symlink isn't going to work anyway -- just break it immediately - target_path = '' + if abspath[: len(self.ROOT)] != self.ROOT: + # this symlink isn't going to work anyway -- just break it + # immediately + target_path = "" try: os.symlink(target_path, path) except OSError as e: @@ -199,10 +223,10 @@ def readlink(self, path): return SFTPServer.convert_errno(e.errno) # if it's absolute, remove the root if os.path.isabs(symlink): - if symlink[:len(self.ROOT)] == self.ROOT: - symlink = symlink[len(self.ROOT):] - if (len(symlink) == 0) or (symlink[0] != '/'): - symlink = '/' + symlink + if symlink[: len(self.ROOT)] == self.ROOT: + symlink = symlink[len(self.ROOT) :] + if (len(symlink) == 0) or (symlink[0] != "/"): + symlink = "/" + symlink else: - symlink = '' + symlink = "" return symlink diff --git a/tests/test_agent.py b/tests/test_agent.py new file mode 100644 index 000000000..c3973bbb6 --- /dev/null +++ b/tests/test_agent.py @@ -0,0 +1,50 @@ +import unittest + +from paramiko.message import Message +from paramiko.agent import ( + SSH2_AGENT_SIGN_RESPONSE, + cSSH2_AGENTC_SIGN_REQUEST, + SSH_AGENT_RSA_SHA2_256, + SSH_AGENT_RSA_SHA2_512, + AgentKey, +) +from paramiko.py3compat import b + + +class ChaosAgent: + def _send_message(self, msg): + self._sent_message = msg + sig = Message() + sig.add_string(b("lol")) + sig.rewind() + return SSH2_AGENT_SIGN_RESPONSE, sig + + +class AgentTests(unittest.TestCase): + def _sign_with_agent(self, kwargs, expectation): + agent = ChaosAgent() + key = AgentKey(agent, b("secret!!!")) + result = key.sign_ssh_data(b("token"), **kwargs) + assert result == b("lol") + msg = agent._sent_message + msg.rewind() + assert msg.get_byte() == cSSH2_AGENTC_SIGN_REQUEST + assert msg.get_string() == b("secret!!!") + assert msg.get_string() == b("token") + assert msg.get_int() == expectation + + def test_agent_signing_defaults_to_0_for_flags_field(self): + # No algorithm kwarg at all + self._sign_with_agent(kwargs=dict(), expectation=0) + + def test_agent_signing_is_2_for_SHA256(self): + self._sign_with_agent( + kwargs=dict(algorithm="rsa-sha2-256"), + expectation=SSH_AGENT_RSA_SHA2_256, + ) + + def test_agent_signing_is_2_for_SHA512(self): + self._sign_with_agent( + kwargs=dict(algorithm="rsa-sha2-512"), + expectation=SSH_AGENT_RSA_SHA2_512, + ) diff --git a/tests/test_auth.py b/tests/test_auth.py index 235177902..01fbac5b6 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -23,60 +23,75 @@ import sys import threading import unittest +from time import sleep -from paramiko import Transport, ServerInterface, RSAKey, DSSKey, \ - BadAuthenticationType, InteractiveQuery, \ - AuthenticationException +from paramiko import ( + Transport, + ServerInterface, + RSAKey, + DSSKey, + BadAuthenticationType, + InteractiveQuery, + AuthenticationException, +) from paramiko import AUTH_FAILED, AUTH_PARTIALLY_SUCCESSFUL, AUTH_SUCCESSFUL from paramiko.py3compat import u -from tests.loop import LoopSocket -from tests.util import test_path -_pwd = u('\u2022') +from .loop import LoopSocket +from .util import _support, slow -class NullServer (ServerInterface): +_pwd = u("\u2022") + + +class NullServer(ServerInterface): paranoid_did_password = False 
paranoid_did_public_key = False - paranoid_key = DSSKey.from_private_key_file(test_path('test_dss.key')) + paranoid_key = DSSKey.from_private_key_file(_support("test_dss.key")) def get_allowed_auths(self, username): - if username == 'slowdive': - return 'publickey,password' - if username == 'paranoid': - if not self.paranoid_did_password and not self.paranoid_did_public_key: - return 'publickey,password' + if username == "slowdive": + return "publickey,password" + if username == "paranoid": + if ( + not self.paranoid_did_password + and not self.paranoid_did_public_key + ): + return "publickey,password" elif self.paranoid_did_password: - return 'publickey' + return "publickey" else: - return 'password' - if username == 'commie': - return 'keyboard-interactive' - if username == 'utf8': - return 'password' - if username == 'non-utf8': - return 'password' - return 'publickey' + return "password" + if username == "commie": + return "keyboard-interactive" + if username == "utf8": + return "password" + if username == "non-utf8": + return "password" + return "publickey" def check_auth_password(self, username, password): - if (username == 'slowdive') and (password == 'pygmalion'): + if (username == "slowdive") and (password == "pygmalion"): return AUTH_SUCCESSFUL - if (username == 'paranoid') and (password == 'paranoid'): + if (username == "paranoid") and (password == "paranoid"): # 2-part auth (even openssh doesn't support this) self.paranoid_did_password = True if self.paranoid_did_public_key: return AUTH_SUCCESSFUL return AUTH_PARTIALLY_SUCCESSFUL - if (username == 'utf8') and (password == _pwd): + if (username == "utf8") and (password == _pwd): return AUTH_SUCCESSFUL - if (username == 'non-utf8') and (password == '\xff'): + if (username == "non-utf8") and (password == "\xff"): return AUTH_SUCCESSFUL - if username == 'bad-server': + if username == "bad-server": raise Exception("Ack!") + if username == "unresponsive-server": + sleep(5) + return AUTH_SUCCESSFUL return AUTH_FAILED def check_auth_publickey(self, username, key): - if (username == 'paranoid') and (key == self.paranoid_key): + if (username == "paranoid") and (key == self.paranoid_key): # 2-part auth self.paranoid_did_public_key = True if self.paranoid_did_password: @@ -85,20 +100,21 @@ def check_auth_publickey(self, username, key): return AUTH_FAILED def check_auth_interactive(self, username, submethods): - if username == 'commie': + if username == "commie": self.username = username - return InteractiveQuery('password', 'Please enter a password.', ('Password', False)) + return InteractiveQuery( + "password", "Please enter a password.", ("Password", False) + ) return AUTH_FAILED def check_auth_interactive_response(self, responses): - if self.username == 'commie': - if (len(responses) == 1) and (responses[0] == 'cat'): + if self.username == "commie": + if (len(responses) == 1) and (responses[0] == "cat"): return AUTH_SUCCESSFUL return AUTH_FAILED -class AuthTest (unittest.TestCase): - +class AuthTest(unittest.TestCase): def setUp(self): self.socks = LoopSocket() self.sockc = LoopSocket() @@ -113,7 +129,7 @@ def tearDown(self): self.sockc.close() def start_server(self): - host_key = RSAKey.from_private_key_file(test_path('test_rsa.key')) + host_key = RSAKey.from_private_key_file(_support("test_rsa.key")) self.public_host_key = RSAKey(data=host_key.asbytes()) self.ts.add_server_key(host_key) self.event = threading.Event() @@ -126,22 +142,25 @@ def verify_finished(self): self.assertTrue(self.event.is_set()) 
self.assertTrue(self.ts.is_active()) - def test_1_bad_auth_type(self): + def test_bad_auth_type(self): """ verify that we get the right exception when an unsupported auth type is requested. """ self.start_server() try: - self.tc.connect(hostkey=self.public_host_key, - username='unknown', password='error') + self.tc.connect( + hostkey=self.public_host_key, + username="unknown", + password="error", + ) self.assertTrue(False) except: etype, evalue, etb = sys.exc_info() self.assertEqual(BadAuthenticationType, etype) - self.assertEqual(['publickey'], evalue.allowed_types) + self.assertEqual(["publickey"], evalue.allowed_types) - def test_2_bad_password(self): + def test_bad_password(self): """ verify that a bad password gets the right exception, and that a retry with the right password works. @@ -149,28 +168,30 @@ def test_2_bad_password(self): self.start_server() self.tc.connect(hostkey=self.public_host_key) try: - self.tc.auth_password(username='slowdive', password='error') + self.tc.auth_password(username="slowdive", password="error") self.assertTrue(False) except: etype, evalue, etb = sys.exc_info() self.assertTrue(issubclass(etype, AuthenticationException)) - self.tc.auth_password(username='slowdive', password='pygmalion') + self.tc.auth_password(username="slowdive", password="pygmalion") self.verify_finished() - def test_3_multipart_auth(self): + def test_multipart_auth(self): """ verify that multipart auth works. """ self.start_server() self.tc.connect(hostkey=self.public_host_key) - remain = self.tc.auth_password(username='paranoid', password='paranoid') - self.assertEqual(['publickey'], remain) - key = DSSKey.from_private_key_file(test_path('test_dss.key')) - remain = self.tc.auth_publickey(username='paranoid', key=key) + remain = self.tc.auth_password( + username="paranoid", password="paranoid" + ) + self.assertEqual(["publickey"], remain) + key = DSSKey.from_private_key_file(_support("test_dss.key")) + remain = self.tc.auth_publickey(username="paranoid", key=key) self.assertEqual([], remain) self.verify_finished() - def test_4_interactive_auth(self): + def test_interactive_auth(self): """ verify keyboard-interactive auth works. """ @@ -181,46 +202,47 @@ def handler(title, instructions, prompts): self.got_title = title self.got_instructions = instructions self.got_prompts = prompts - return ['cat'] - remain = self.tc.auth_interactive('commie', handler) - self.assertEqual(self.got_title, 'password') - self.assertEqual(self.got_prompts, [('Password', False)]) + return ["cat"] + + remain = self.tc.auth_interactive("commie", handler) + self.assertEqual(self.got_title, "password") + self.assertEqual(self.got_prompts, [("Password", False)]) self.assertEqual([], remain) self.verify_finished() - def test_5_interactive_auth_fallback(self): + def test_interactive_auth_fallback(self): """ verify that a password auth attempt will fallback to "interactive" if password auth isn't supported but interactive is. """ self.start_server() self.tc.connect(hostkey=self.public_host_key) - remain = self.tc.auth_password('commie', 'cat') + remain = self.tc.auth_password("commie", "cat") self.assertEqual([], remain) self.verify_finished() - def test_6_auth_utf8(self): + def test_auth_utf8(self): """ verify that utf-8 encoding happens in authentication. 
""" self.start_server() self.tc.connect(hostkey=self.public_host_key) - remain = self.tc.auth_password('utf8', _pwd) + remain = self.tc.auth_password("utf8", _pwd) self.assertEqual([], remain) self.verify_finished() - def test_7_auth_non_utf8(self): + def test_auth_non_utf8(self): """ verify that non-utf-8 encoded passwords can be used for broken servers. """ self.start_server() self.tc.connect(hostkey=self.public_host_key) - remain = self.tc.auth_password('non-utf8', '\xff') + remain = self.tc.auth_password("non-utf8", "\xff") self.assertEqual([], remain) self.verify_finished() - def test_8_auth_gets_disconnected(self): + def test_auth_gets_disconnected(self): """ verify that we catch a server disconnecting during auth, and report it as an auth failure. @@ -228,7 +250,23 @@ def test_8_auth_gets_disconnected(self): self.start_server() self.tc.connect(hostkey=self.public_host_key) try: - remain = self.tc.auth_password('bad-server', 'hello') + self.tc.auth_password("bad-server", "hello") + except: + etype, evalue, etb = sys.exc_info() + self.assertTrue(issubclass(etype, AuthenticationException)) + + @slow + def test_auth_non_responsive(self): + """ + verify that authentication times out if server takes to long to + respond (or never responds). + """ + self.tc.auth_timeout = 1 # 1 second, to speed up test + self.start_server() + self.tc.connect() + try: + self.tc.auth_password("unresponsive-server", "hello") except: etype, evalue, etb = sys.exc_info() self.assertTrue(issubclass(etype, AuthenticationException)) + self.assertTrue("Authentication timeout" in str(evalue)) diff --git a/tests/test_buffered_pipe.py b/tests/test_buffered_pipe.py index eeb4d0ad2..61c99cc0b 100644 --- a/tests/test_buffered_pipe.py +++ b/tests/test_buffered_pipe.py @@ -23,15 +23,15 @@ import threading import time import unittest + from paramiko.buffered_pipe import BufferedPipe, PipeTimeout from paramiko import pipe -from paramiko.py3compat import b def delay_thread(p): - p.feed('a') + p.feed("a") time.sleep(0.5) - p.feed('b') + p.feed("b") p.close() @@ -41,43 +41,43 @@ def close_thread(p): class BufferedPipeTest(unittest.TestCase): - def test_1_buffered_pipe(self): + def test_buffered_pipe(self): p = BufferedPipe() self.assertTrue(not p.read_ready()) - p.feed('hello.') + p.feed("hello.") self.assertTrue(p.read_ready()) data = p.read(6) - self.assertEqual(b'hello.', data) + self.assertEqual(b"hello.", data) - p.feed('plus/minus') - self.assertEqual(b'plu', p.read(3)) - self.assertEqual(b's/m', p.read(3)) - self.assertEqual(b'inus', p.read(4)) + p.feed("plus/minus") + self.assertEqual(b"plu", p.read(3)) + self.assertEqual(b"s/m", p.read(3)) + self.assertEqual(b"inus", p.read(4)) p.close() self.assertTrue(not p.read_ready()) - self.assertEqual(b'', p.read(1)) + self.assertEqual(b"", p.read(1)) - def test_2_delay(self): + def test_delay(self): p = BufferedPipe() self.assertTrue(not p.read_ready()) threading.Thread(target=delay_thread, args=(p,)).start() - self.assertEqual(b'a', p.read(1, 0.1)) + self.assertEqual(b"a", p.read(1, 0.1)) try: p.read(1, 0.1) self.assertTrue(False) except PipeTimeout: pass - self.assertEqual(b'b', p.read(1, 1.0)) - self.assertEqual(b'', p.read(1)) + self.assertEqual(b"b", p.read(1, 1.0)) + self.assertEqual(b"", p.read(1)) - def test_3_close_while_reading(self): + def test_close_while_reading(self): p = BufferedPipe() threading.Thread(target=close_thread, args=(p,)).start() data = p.read(1, 1.0) - self.assertEqual(b'', data) + self.assertEqual(b"", data) - def test_4_or_pipe(self): + def 
test_or_pipe(self): p = pipe.make_pipe() p1, p2 = pipe.make_or_pipe(p) self.assertFalse(p._set) diff --git a/tests/test_channelfile.py b/tests/test_channelfile.py new file mode 100644 index 000000000..4448fdfbc --- /dev/null +++ b/tests/test_channelfile.py @@ -0,0 +1,60 @@ +from mock import patch, MagicMock + +from paramiko import Channel, ChannelFile, ChannelStderrFile, ChannelStdinFile + + +class ChannelFileBase(object): + @patch("paramiko.channel.ChannelFile._set_mode") + def test_defaults_to_unbuffered_reading(self, setmode): + self.klass(Channel(None)) + setmode.assert_called_once_with("r", -1) + + @patch("paramiko.channel.ChannelFile._set_mode") + def test_can_override_mode_and_bufsize(self, setmode): + self.klass(Channel(None), mode="w", bufsize=25) + setmode.assert_called_once_with("w", 25) + + def test_read_recvs_from_channel(self): + chan = MagicMock() + cf = self.klass(chan) + cf.read(100) + chan.recv.assert_called_once_with(100) + + def test_write_calls_channel_sendall(self): + chan = MagicMock() + cf = self.klass(chan, mode="w") + cf.write("ohai") + chan.sendall.assert_called_once_with(b"ohai") + + +class TestChannelFile(ChannelFileBase): + klass = ChannelFile + + +class TestChannelStderrFile(object): + def test_read_calls_channel_recv_stderr(self): + chan = MagicMock() + cf = ChannelStderrFile(chan) + cf.read(100) + chan.recv_stderr.assert_called_once_with(100) + + def test_write_calls_channel_sendall(self): + chan = MagicMock() + cf = ChannelStderrFile(chan, mode="w") + cf.write("ohai") + chan.sendall_stderr.assert_called_once_with(b"ohai") + + +class TestChannelStdinFile(ChannelFileBase): + klass = ChannelStdinFile + + def test_close_calls_channel_shutdown_write(self): + chan = MagicMock() + cf = ChannelStdinFile(chan, mode="wb") + cf.flush = MagicMock() + cf.close() + # Sanity check that we still call BufferedFile.close() + cf.flush.assert_called_once_with() + assert cf._closed is True + # Actual point of test + chan.shutdown_write.assert_called_once_with() diff --git a/tests/test_client.py b/tests/test_client.py index 63ff92976..f14aac230 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -20,45 +20,60 @@ Some unit tests for SSHClient. 
""" -from __future__ import with_statement +from __future__ import with_statement, print_function import gc +import os import platform import socket -from tempfile import mkstemp import threading +import time import unittest -import weakref import warnings -import os -import time -from tests.util import test_path +import weakref +from tempfile import mkstemp + +from pytest_relaxed import raises +from mock import patch, Mock import paramiko -from paramiko.common import PY2 -from paramiko.ssh_exception import SSHException +from paramiko import SSHClient +from paramiko.pkey import PublicBlob +from paramiko.ssh_exception import SSHException, AuthenticationException + +from .util import _support, slow +requires_gss_auth = unittest.skipUnless( + paramiko.GSS_AUTH_AVAILABLE, "GSS auth not available" +) + FINGERPRINTS = { - 'ssh-dss': b'\x44\x78\xf0\xb9\xa2\x3c\xc5\x18\x20\x09\xff\x75\x5b\xc1\xd2\x6c', - 'ssh-rsa': b'\x60\x73\x38\x44\xcb\x51\x86\x65\x7f\xde\xda\xa2\x2b\x5a\x57\xd5', - 'ecdsa-sha2-nistp256': b'\x25\x19\xeb\x55\xe6\xa1\x47\xff\x4f\x38\xd2\x75\x6f\xa5\xd5\x60', + "ssh-dss": b"\x44\x78\xf0\xb9\xa2\x3c\xc5\x18\x20\x09\xff\x75\x5b\xc1\xd2\x6c", # noqa + "ssh-rsa": b"\x60\x73\x38\x44\xcb\x51\x86\x65\x7f\xde\xda\xa2\x2b\x5a\x57\xd5", # noqa + "ecdsa-sha2-nistp256": b"\x25\x19\xeb\x55\xe6\xa1\x47\xff\x4f\x38\xd2\x75\x6f\xa5\xd5\x60", # noqa + "ssh-ed25519": b'\xb3\xd5"\xaa\xf9u^\xe8\xcd\x0e\xea\x02\xb9)\xa2\x80', } -class NullServer (paramiko.ServerInterface): +class NullServer(paramiko.ServerInterface): def __init__(self, *args, **kwargs): # Allow tests to enable/disable specific key types - self.__allowed_keys = kwargs.pop('allowed_keys', []) + self.__allowed_keys = kwargs.pop("allowed_keys", []) + # And allow them to set a (single...meh) expected public blob (cert) + self.__expected_public_blob = kwargs.pop("public_blob", None) super(NullServer, self).__init__(*args, **kwargs) def get_allowed_auths(self, username): - if username == 'slowdive': - return 'publickey,password' - return 'publickey' + if username == "slowdive": + return "publickey,password" + return "publickey" def check_auth_password(self, username, password): - if (username == 'slowdive') and (password == 'pygmalion'): + if (username == "slowdive") and (password == "pygmalion"): + return paramiko.AUTH_SUCCESSFUL + if (username == "slowdive") and (password == "unresponsive-server"): + time.sleep(5) return paramiko.AUTH_SUCCESSFUL return paramiko.AUTH_FAILED @@ -67,50 +82,95 @@ def check_auth_publickey(self, username, key): expected = FINGERPRINTS[key.get_name()] except KeyError: return paramiko.AUTH_FAILED + # Base check: allowed auth type & fingerprint matches + happy = ( + key.get_name() in self.__allowed_keys + and key.get_fingerprint() == expected + ) + # Secondary check: if test wants assertions about cert data if ( - key.get_name() in self.__allowed_keys and - key.get_fingerprint() == expected + self.__expected_public_blob is not None + and key.public_blob != self.__expected_public_blob ): - return paramiko.AUTH_SUCCESSFUL - return paramiko.AUTH_FAILED + happy = False + return paramiko.AUTH_SUCCESSFUL if happy else paramiko.AUTH_FAILED def check_channel_request(self, kind, chanid): return paramiko.OPEN_SUCCEEDED def check_channel_exec_request(self, channel, command): - if command != b'yes': + if command != b"yes": return False return True + def check_channel_env_request(self, channel, name, value): + if name == "INVALID_ENV": + return False + + if not hasattr(channel, "env"): + setattr(channel, "env", {}) + + 
channel.env[name] = value + return True -class SSHClientTest (unittest.TestCase): +class ClientTest(unittest.TestCase): def setUp(self): self.sockl = socket.socket() - self.sockl.bind(('localhost', 0)) + self.sockl.bind(("localhost", 0)) self.sockl.listen(1) self.addr, self.port = self.sockl.getsockname() self.connect_kwargs = dict( hostname=self.addr, port=self.port, - username='slowdive', + username="slowdive", look_for_keys=False, ) self.event = threading.Event() + self.kill_event = threading.Event() def tearDown(self): - for attr in "tc ts socks sockl".split(): - if hasattr(self, attr): - getattr(self, attr).close() - - def _run(self, allowed_keys=None, delay=0): + # Shut down client Transport + if hasattr(self, "tc"): + self.tc.close() + # Shut down shared socket + if hasattr(self, "sockl"): + # Signal to server thread that it should shut down early; it checks + # this immediately after accept(). (In scenarios where connection + # actually succeeded during the test, this becomes a no-op.) + self.kill_event.set() + # Forcibly connect to server sock in case the server thread is + # hanging out in its accept() (e.g. if the client side of the test + # fails before it even gets to connecting); there's no other good + # way to force an accept() to exit. + put_a_sock_in_it = socket.socket() + put_a_sock_in_it.connect((self.addr, self.port)) + put_a_sock_in_it.close() + # Then close "our" end of the socket (which _should_ cause the + # accept() to bail out, but does not, for some reason. I blame + # threading.) + self.sockl.close() + + def _run( + self, allowed_keys=None, delay=0, public_blob=None, kill_event=None + ): if allowed_keys is None: allowed_keys = FINGERPRINTS.keys() self.socks, addr = self.sockl.accept() + # If the kill event was set at this point, it indicates an early + # shutdown, so bail out now and don't even try setting up a Transport + # (which will just verbosely die.) + if kill_event and kill_event.is_set(): + self.socks.close() + return self.ts = paramiko.Transport(self.socks) - host_key = paramiko.RSAKey.from_private_key_file(test_path('test_rsa.key')) + keypath = _support("test_rsa.key") + host_key = paramiko.RSAKey.from_private_key_file(keypath) self.ts.add_server_key(host_key) - server = NullServer(allowed_keys=allowed_keys) + keypath = _support("test_ecdsa_256.key") + host_key = paramiko.ECDSAKey.from_private_key_file(keypath) + self.ts.add_server_key(host_key) + server = NullServer(allowed_keys=allowed_keys, public_blob=public_blob) if delay: time.sleep(delay) self.ts.start_server(self.event, server) @@ -122,15 +182,21 @@ def _test_connection(self, **kwargs): The exception is ``allowed_keys`` which is stripped and handed to the ``NullServer`` used for testing. 
""" - run_kwargs = {'allowed_keys': kwargs.pop('allowed_keys', None)} + run_kwargs = {"kill_event": self.kill_event} + for key in ("allowed_keys", "public_blob"): + run_kwargs[key] = kwargs.pop(key, None) # Server setup threading.Thread(target=self._run, kwargs=run_kwargs).start() - host_key = paramiko.RSAKey.from_private_key_file(test_path('test_rsa.key')) + host_key = paramiko.RSAKey.from_private_key_file( + _support("test_rsa.key") + ) public_host_key = paramiko.RSAKey(data=host_key.asbytes()) # Client setup - self.tc = paramiko.SSHClient() - self.tc.get_host_keys().add('[%s]:%d' % (self.addr, self.port), 'ssh-rsa', public_host_key) + self.tc = SSHClient() + self.tc.get_host_keys().add( + "[%s]:%d" % (self.addr, self.port), "ssh-rsa", public_host_key + ) # Actual connection self.tc.connect(**dict(self.connect_kwargs, **kwargs)) @@ -139,73 +205,85 @@ def _test_connection(self, **kwargs): self.event.wait(1.0) self.assertTrue(self.event.is_set()) self.assertTrue(self.ts.is_active()) - self.assertEqual('slowdive', self.ts.get_username()) + self.assertEqual("slowdive", self.ts.get_username()) self.assertEqual(True, self.ts.is_authenticated()) + self.assertEqual(False, self.tc.get_transport().gss_kex_used) # Command execution functions? - stdin, stdout, stderr = self.tc.exec_command('yes') + stdin, stdout, stderr = self.tc.exec_command("yes") schan = self.ts.accept(1.0) - schan.send('Hello there.\n') - schan.send_stderr('This is on stderr.\n') + # Nobody else tests the API of exec_command so let's do it here for + # now. :weary: + assert isinstance(stdin, paramiko.ChannelStdinFile) + assert isinstance(stdout, paramiko.ChannelFile) + assert isinstance(stderr, paramiko.ChannelStderrFile) + + schan.send("Hello there.\n") + schan.send_stderr("This is on stderr.\n") schan.close() - self.assertEqual('Hello there.\n', stdout.readline()) - self.assertEqual('', stdout.readline()) - self.assertEqual('This is on stderr.\n', stderr.readline()) - self.assertEqual('', stderr.readline()) + self.assertEqual("Hello there.\n", stdout.readline()) + self.assertEqual("", stdout.readline()) + self.assertEqual("This is on stderr.\n", stderr.readline()) + self.assertEqual("", stderr.readline()) # Cleanup stdin.close() stdout.close() stderr.close() - def test_1_client(self): + +class SSHClientTest(ClientTest): + def test_client(self): """ verify that the SSHClient stuff works too. """ - self._test_connection(password='pygmalion') + self._test_connection(password="pygmalion") - def test_2_client_dsa(self): + def test_client_dsa(self): """ verify that SSHClient works with a DSA key. """ - self._test_connection(key_filename=test_path('test_dss.key')) + self._test_connection(key_filename=_support("test_dss.key")) def test_client_rsa(self): """ verify that SSHClient works with an RSA key. """ - self._test_connection(key_filename=test_path('test_rsa.key')) + self._test_connection(key_filename=_support("test_rsa.key")) - def test_2_5_client_ecdsa(self): + def test_client_ecdsa(self): """ verify that SSHClient works with an ECDSA key. """ - self._test_connection(key_filename=test_path('test_ecdsa_256.key')) + self._test_connection(key_filename=_support("test_ecdsa_256.key")) + + def test_client_ed25519(self): + self._test_connection(key_filename=_support("test_ed25519.key")) - def test_3_multiple_key_files(self): + def test_multiple_key_files(self): """ verify that SSHClient accepts and tries multiple key files. 
""" # This is dumb :( types_ = { - 'rsa': 'ssh-rsa', - 'dss': 'ssh-dss', - 'ecdsa': 'ecdsa-sha2-nistp256', + "rsa": "ssh-rsa", + "dss": "ssh-dss", + "ecdsa": "ecdsa-sha2-nistp256", } # Various combos of attempted & valid keys # TODO: try every possible combo using itertools functions for attempt, accept in ( - (['rsa', 'dss'], ['dss']), # Original test #3 - (['dss', 'rsa'], ['dss']), # Ordering matters sometimes, sadly - (['dss', 'rsa', 'ecdsa_256'], ['dss']), # Try ECDSA but fail - (['rsa', 'ecdsa_256'], ['ecdsa']), # ECDSA success + (["rsa", "dss"], ["dss"]), # Original test #3 + (["dss", "rsa"], ["dss"]), # Ordering matters sometimes, sadly + (["dss", "rsa", "ecdsa_256"], ["dss"]), # Try ECDSA but fail + (["rsa", "ecdsa_256"], ["ecdsa"]), # ECDSA success ): try: self._test_connection( key_filename=[ - test_path('test_{0}.key'.format(x)) for x in attempt + _support("test_{}.key".format(x)) for x in attempt ], allowed_keys=[types_[x] for x in accept], ) @@ -221,52 +299,93 @@ def test_multiple_key_files_failure(self): """ # Until #387 is fixed we have to catch a high-up exception since # various platforms trigger different errors here >_< - self.assertRaises(SSHException, + self.assertRaises( + SSHException, self._test_connection, - key_filename=[test_path('test_rsa.key')], - allowed_keys=['ecdsa-sha2-nistp256'], + key_filename=[_support("test_rsa.key")], + allowed_keys=["ecdsa-sha2-nistp256"], ) - def test_4_auto_add_policy(self): + def test_certs_allowed_as_key_filename_values(self): + # NOTE: giving cert path here, not key path. (Key path test is below. + # They're similar except for which path is given; the expected auth and + # server-side behavior is 100% identical.) + # NOTE: only bothered whipping up one cert per overall class/family. + for type_ in ("rsa", "dss", "ecdsa_256", "ed25519"): + cert_name = "test_{}.key-cert.pub".format(type_) + cert_path = _support(os.path.join("cert_support", cert_name)) + self._test_connection( + key_filename=cert_path, + public_blob=PublicBlob.from_file(cert_path), + ) + + def test_certs_implicitly_loaded_alongside_key_filename_keys(self): + # NOTE: a regular test_connection() w/ test_rsa.key would incidentally + # test this (because test_xxx.key-cert.pub exists) but incidental tests + # stink, so NullServer and friends were updated to allow assertions + # about the server-side key object's public blob. Thus, we can prove + # that a specific cert was found, along with regular authorization + # succeeding proving that the overall flow works. + for type_ in ("rsa", "dss", "ecdsa_256", "ed25519"): + key_name = "test_{}.key".format(type_) + key_path = _support(os.path.join("cert_support", key_name)) + self._test_connection( + key_filename=key_path, + public_blob=PublicBlob.from_file( + "{}-cert.pub".format(key_path) + ), + ) + + def test_default_key_locations_trigger_cert_loads_if_found(self): + # TODO: what it says on the tin: ~/.ssh/id_rsa tries to load + # ~/.ssh/id_rsa-cert.pub. Right now no other tests actually test that + # code path (!) so we're punting too, sob. + pass + + def test_auto_add_policy(self): """ verify that SSHClient's AutoAddPolicy works. 
""" threading.Thread(target=self._run).start() - host_key = paramiko.RSAKey.from_private_key_file(test_path('test_rsa.key')) - public_host_key = paramiko.RSAKey(data=host_key.asbytes()) + hostname = "[%s]:%d" % (self.addr, self.port) + key_file = _support("test_ecdsa_256.key") + public_host_key = paramiko.ECDSAKey.from_private_key_file(key_file) - self.tc = paramiko.SSHClient() + self.tc = SSHClient() self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.assertEqual(0, len(self.tc.get_host_keys())) - self.tc.connect(password='pygmalion', **self.connect_kwargs) + self.tc.connect(password="pygmalion", **self.connect_kwargs) self.event.wait(1.0) self.assertTrue(self.event.is_set()) self.assertTrue(self.ts.is_active()) - self.assertEqual('slowdive', self.ts.get_username()) + self.assertEqual("slowdive", self.ts.get_username()) self.assertEqual(True, self.ts.is_authenticated()) self.assertEqual(1, len(self.tc.get_host_keys())) - self.assertEqual(public_host_key, self.tc.get_host_keys()['[%s]:%d' % (self.addr, self.port)]['ssh-rsa']) + new_host_key = list(self.tc.get_host_keys()[hostname].values())[0] + self.assertEqual(public_host_key, new_host_key) - def test_5_save_host_keys(self): + def test_save_host_keys(self): """ verify that SSHClient correctly saves a known_hosts file. """ - warnings.filterwarnings('ignore', 'tempnam.*') + warnings.filterwarnings("ignore", "tempnam.*") - host_key = paramiko.RSAKey.from_private_key_file(test_path('test_rsa.key')) + host_key = paramiko.RSAKey.from_private_key_file( + _support("test_rsa.key") + ) public_host_key = paramiko.RSAKey(data=host_key.asbytes()) fd, localname = mkstemp() os.close(fd) - client = paramiko.SSHClient() - self.assertEquals(0, len(client.get_host_keys())) + client = SSHClient() + assert len(client.get_host_keys()) == 0 - host_id = '[%s]:%d' % (self.addr, self.port) + host_id = "[%s]:%d" % (self.addr, self.port) - client.get_host_keys().add(host_id, 'ssh-rsa', public_host_key) - self.assertEquals(1, len(client.get_host_keys())) - self.assertEquals(public_host_key, client.get_host_keys()[host_id]['ssh-rsa']) + client.get_host_keys().add(host_id, "ssh-rsa", public_host_key) + assert len(client.get_host_keys()) == 1 + assert public_host_key == client.get_host_keys()[host_id]["ssh-rsa"] client.save_host_keys(localname) @@ -275,41 +394,41 @@ def test_5_save_host_keys(self): os.unlink(localname) - def test_6_cleanup(self): + def test_cleanup(self): """ verify that when an SSHClient is collected, its transport (and the transport's packetizer) is closed. """ - # Unclear why this is borked on Py3, but it is, and does not seem worth - # pursuing at the moment. Skipped on PyPy because it fails on travis - # for unknown reasons, works fine locally. - # XXX: It's the release of the references to e.g packetizer that fails - # in py3... 
- if not PY2 or platform.python_implementation() == "PyPy": + # Skipped on PyPy because it fails on travis for unknown reasons + if platform.python_implementation() == "PyPy": return + threading.Thread(target=self._run).start() - self.tc = paramiko.SSHClient() + self.tc = SSHClient() self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - self.assertEqual(0, len(self.tc.get_host_keys())) - self.tc.connect(**dict(self.connect_kwargs, password='pygmalion')) + assert len(self.tc.get_host_keys()) == 0 + self.tc.connect(**dict(self.connect_kwargs, password="pygmalion")) self.event.wait(1.0) - self.assertTrue(self.event.is_set()) - self.assertTrue(self.ts.is_active()) + assert self.event.is_set() + assert self.ts.is_active() p = weakref.ref(self.tc._transport.packetizer) - self.assertTrue(p() is not None) + assert p() is not None self.tc.close() del self.tc # force a collection to see whether the SSHClient object is deallocated - # correctly. 2 GCs are needed to make sure it's really collected on - # PyPy + # 2 GCs are needed on PyPy, time is needed for Python 3 + # TODO: this still fails randomly under CircleCI under Python 3.7, 3.8 + # at the very least. bumped sleep 0.3->1.0s but the underlying + # functionality should get reevaluated once we drop Python 2. + time.sleep(1) gc.collect() gc.collect() - self.assertTrue(p() is None) + assert p() is None def test_client_can_be_used_as_context_manager(self): """ @@ -317,11 +436,11 @@ def test_client_can_be_used_as_context_manager(self): """ threading.Thread(target=self._run).start() - with paramiko.SSHClient() as tc: + with SSHClient() as tc: self.tc = tc self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - self.assertEquals(0, len(self.tc.get_host_keys())) - self.tc.connect(**dict(self.connect_kwargs, password='pygmalion')) + assert len(self.tc.get_host_keys()) == 0 + self.tc.connect(**dict(self.connect_kwargs, password="pygmalion")) self.event.wait(1.0) self.assertTrue(self.event.is_set()) @@ -331,41 +450,272 @@ def test_client_can_be_used_as_context_manager(self): self.assertTrue(self.tc._transport is None) - def test_7_banner_timeout(self): + def test_banner_timeout(self): """ verify that the SSHClient has a configurable banner timeout. """ # Start the thread with a 1 second wait. - threading.Thread(target=self._run, kwargs={'delay': 1}).start() - host_key = paramiko.RSAKey.from_private_key_file(test_path('test_rsa.key')) + threading.Thread(target=self._run, kwargs={"delay": 1}).start() + host_key = paramiko.RSAKey.from_private_key_file( + _support("test_rsa.key") + ) public_host_key = paramiko.RSAKey(data=host_key.asbytes()) - self.tc = paramiko.SSHClient() - self.tc.get_host_keys().add('[%s]:%d' % (self.addr, self.port), 'ssh-rsa', public_host_key) + self.tc = SSHClient() + self.tc.get_host_keys().add( + "[%s]:%d" % (self.addr, self.port), "ssh-rsa", public_host_key + ) # Connect with a half second banner timeout. kwargs = dict(self.connect_kwargs, banner_timeout=0.5) - self.assertRaises( - paramiko.SSHException, - self.tc.connect, - **kwargs - ) + self.assertRaises(paramiko.SSHException, self.tc.connect, **kwargs) - def test_8_auth_trickledown(self): + def test_auth_trickledown(self): """ Failed key auth doesn't prevent subsequent pw auth from succeeding """ # NOTE: re #387, re #394 # If pkey module used within Client._auth isn't correctly handling auth # errors (e.g. 
if it allows things like ValueError to bubble up as per - # midway thru #394) client.connect() will fail (at key load step) + # midway through #394) client.connect() will fail (at key load step) # instead of succeeding (at password step) kwargs = dict( # Password-protected key whose passphrase is not 'pygmalion' (it's # 'television' as per tests/test_pkey.py). NOTE: must use # key_filename, loading the actual key here with PKey will except # immediately; we're testing the try/except crap within Client. - key_filename=[test_path('test_rsa_password.key')], + key_filename=[_support("test_rsa_password.key")], # Actual password for default 'slowdive' user - password='pygmalion', + password="pygmalion", + ) + self._test_connection(**kwargs) + + @slow + def test_auth_timeout(self): + """ + verify that the SSHClient has a configurable auth timeout + """ + # Connect with a half second auth timeout + self.assertRaises( + AuthenticationException, + self._test_connection, + password="unresponsive-server", + auth_timeout=0.5, ) + + @requires_gss_auth + def test_auth_trickledown_gsskex(self): + """ + Failed gssapi-keyex doesn't prevent subsequent key from succeeding + """ + kwargs = dict(gss_kex=True, key_filename=[_support("test_rsa.key")]) self._test_connection(**kwargs) + + @requires_gss_auth + def test_auth_trickledown_gssauth(self): + """ + Failed gssapi-with-mic doesn't prevent subsequent key from succeeding + """ + kwargs = dict(gss_auth=True, key_filename=[_support("test_rsa.key")]) + self._test_connection(**kwargs) + + def test_reject_policy(self): + """ + verify that SSHClient's RejectPolicy works. + """ + threading.Thread(target=self._run).start() + + self.tc = SSHClient() + self.tc.set_missing_host_key_policy(paramiko.RejectPolicy()) + self.assertEqual(0, len(self.tc.get_host_keys())) + self.assertRaises( + paramiko.SSHException, + self.tc.connect, + password="pygmalion", + **self.connect_kwargs + ) + + @requires_gss_auth + def test_reject_policy_gsskex(self): + """ + verify that SSHClient's RejectPolicy works, + even if gssapi-keyex was enabled but not used. 
+ """ + # Test for a bug present in paramiko versions released before + # 2017-08-01 + threading.Thread(target=self._run).start() + + self.tc = SSHClient() + self.tc.set_missing_host_key_policy(paramiko.RejectPolicy()) + self.assertEqual(0, len(self.tc.get_host_keys())) + self.assertRaises( + paramiko.SSHException, + self.tc.connect, + password="pygmalion", + gss_kex=True, + **self.connect_kwargs + ) + + def _client_host_key_bad(self, host_key): + threading.Thread(target=self._run).start() + hostname = "[%s]:%d" % (self.addr, self.port) + + self.tc = SSHClient() + self.tc.set_missing_host_key_policy(paramiko.WarningPolicy()) + known_hosts = self.tc.get_host_keys() + known_hosts.add(hostname, host_key.get_name(), host_key) + + self.assertRaises( + paramiko.BadHostKeyException, + self.tc.connect, + password="pygmalion", + **self.connect_kwargs + ) + + def _client_host_key_good(self, ktype, kfile): + threading.Thread(target=self._run).start() + hostname = "[%s]:%d" % (self.addr, self.port) + + self.tc = SSHClient() + self.tc.set_missing_host_key_policy(paramiko.RejectPolicy()) + host_key = ktype.from_private_key_file(_support(kfile)) + known_hosts = self.tc.get_host_keys() + known_hosts.add(hostname, host_key.get_name(), host_key) + + self.tc.connect(password="pygmalion", **self.connect_kwargs) + self.event.wait(1.0) + self.assertTrue(self.event.is_set()) + self.assertTrue(self.ts.is_active()) + self.assertEqual(True, self.ts.is_authenticated()) + + def test_host_key_negotiation_1(self): + host_key = paramiko.ECDSAKey.generate() + self._client_host_key_bad(host_key) + + def test_host_key_negotiation_2(self): + host_key = paramiko.RSAKey.generate(2048) + self._client_host_key_bad(host_key) + + def test_host_key_negotiation_3(self): + self._client_host_key_good(paramiko.ECDSAKey, "test_ecdsa_256.key") + + def test_host_key_negotiation_4(self): + self._client_host_key_good(paramiko.RSAKey, "test_rsa.key") + + def _setup_for_env(self): + threading.Thread(target=self._run).start() + + self.tc = SSHClient() + self.tc.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + self.assertEqual(0, len(self.tc.get_host_keys())) + self.tc.connect( + self.addr, self.port, username="slowdive", password="pygmalion" + ) + + self.event.wait(1.0) + self.assertTrue(self.event.isSet()) + self.assertTrue(self.ts.is_active()) + + def test_update_environment(self): + """ + Verify that environment variables can be set by the client. + """ + self._setup_for_env() + target_env = {b"A": b"B", b"C": b"d"} + + self.tc.exec_command("yes", environment=target_env) + schan = self.ts.accept(1.0) + self.assertEqual(target_env, getattr(schan, "env", {})) + schan.close() + + @unittest.skip("Clients normally fail silently, thus so do we, for now") + def test_env_update_failures(self): + self._setup_for_env() + with self.assertRaises(SSHException) as manager: + # Verify that a rejection by the server can be detected + self.tc.exec_command("yes", environment={b"INVALID_ENV": b""}) + self.assertTrue( + "INVALID_ENV" in str(manager.exception), + "Expected variable name in error message", + ) + self.assertTrue( + isinstance(manager.exception.args[1], SSHException), + "Expected original SSHException in exception", + ) + + def test_missing_key_policy_accepts_classes_or_instances(self): + """ + Client.missing_host_key_policy() can take classes or instances. + """ + # AN ACTUAL UNIT TEST?! GOOD LORD + # (But then we have to test a private API...meh.) 
+ client = SSHClient() + # Default + assert isinstance(client._policy, paramiko.RejectPolicy) + # Hand in an instance (classic behavior) + client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + assert isinstance(client._policy, paramiko.AutoAddPolicy) + # Hand in just the class (new behavior) + client.set_missing_host_key_policy(paramiko.AutoAddPolicy) + assert isinstance(client._policy, paramiko.AutoAddPolicy) + + @patch("paramiko.client.Transport") + def test_disabled_algorithms_defaults_to_None(self, Transport): + SSHClient().connect("host", sock=Mock(), password="no") + assert Transport.call_args[1]["disabled_algorithms"] is None + + @patch("paramiko.client.Transport") + def test_disabled_algorithms_passed_directly_if_given(self, Transport): + SSHClient().connect( + "host", + sock=Mock(), + password="no", + disabled_algorithms={"keys": ["ssh-dss"]}, + ) + call_arg = Transport.call_args[1]["disabled_algorithms"] + assert call_arg == {"keys": ["ssh-dss"]} + + +class PasswordPassphraseTests(ClientTest): + # TODO: most of these could reasonably be set up to use mocks/assertions + # (e.g. "gave passphrase -> expect PKey was given it as the passphrase") + # instead of suffering a real connection cycle. + # TODO: in that case, move the below to be part of an integration suite? + + def test_password_kwarg_works_for_password_auth(self): + # Straightforward / duplicate of earlier basic password test. + self._test_connection(password="pygmalion") + + # TODO: more granular exception pending #387; should be signaling "no auth + # methods available" because no key and no password + @raises(SSHException) + def test_passphrase_kwarg_not_used_for_password_auth(self): + # Using the "right" password in the "wrong" field shouldn't work. + self._test_connection(passphrase="pygmalion") + + def test_passphrase_kwarg_used_for_key_passphrase(self): + # Straightforward again, with new passphrase kwarg. + self._test_connection( + key_filename=_support("test_rsa_password.key"), + passphrase="television", + ) + + def test_password_kwarg_used_for_passphrase_when_no_passphrase_kwarg_given( + self + ): # noqa + # Backwards compatibility: passphrase in the password field. + self._test_connection( + key_filename=_support("test_rsa_password.key"), + password="television", + ) + + @raises(AuthenticationException) # TODO: more granular + def test_password_kwarg_not_used_for_passphrase_when_passphrase_kwarg_given( # noqa + self + ): + # Sanity: if we're given both fields, the password field is NOT used as + # a passphrase. + self._test_connection( + key_filename=_support("test_rsa_password.key"), + password="television", + passphrase="wat? lol no", + ) diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 000000000..b46dc7b42 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,1010 @@ +# This file is part of Paramiko and subject to the license in /LICENSE in this +# repository + +from os.path import expanduser +from socket import gaierror + +from paramiko.py3compat import string_types + +from invoke import Result +from mock import patch +from pytest import raises, mark, fixture + +from paramiko import ( + SSHConfig, + SSHConfigDict, + CouldNotCanonicalize, + ConfigParseError, +) + +from .util import _config + + +@fixture +def socket(): + """ + Patch all of socket.* in our config module to prevent eg real DNS lookups. + + Also forces getaddrinfo (used in our addressfamily lookup stuff) to always + fail by default to mimic usual lack of AddressFamily related crap. 
+ + Callers who want to mock DNS lookups can then safely assume gethostbyname() + will be in use. + """ + with patch("paramiko.config.socket") as mocket: + # Reinstate gaierror as an actual exception and not a sub-mock. + # (Presumably this would work with any exception, but why not use the + # real one?) + mocket.gaierror = gaierror + # Patch out getaddrinfo, used to detect family-specific IP lookup - + # only useful for a few specific tests. + mocket.getaddrinfo.side_effect = mocket.gaierror + # Patch out getfqdn to return some real string for when it gets called; + # some code (eg tokenization) gets mad w/ MagicMocks + mocket.getfqdn.return_value = "some.fake.fqdn" + mocket.gethostname.return_value = "local.fake.fqdn" + yield mocket + + +def load_config(name): + return SSHConfig.from_path(_config(name)) + + +class TestSSHConfig(object): + def setup(self): + self.config = load_config("robey") + + def test_init(self): + # No args! + with raises(TypeError): + SSHConfig("uh oh!") + # No args. + assert not SSHConfig()._config + + def test_from_text(self): + config = SSHConfig.from_text("User foo") + assert config.lookup("foo.example.com")["user"] == "foo" + + def test_from_file(self): + with open(_config("robey")) as flo: + config = SSHConfig.from_file(flo) + assert config.lookup("whatever")["user"] == "robey" + + def test_from_path(self): + # NOTE: DO NOT replace with use of load_config() :D + config = SSHConfig.from_path(_config("robey")) + assert config.lookup("meh.example.com")["port"] == "3333" + + def test_parse_config(self): + expected = [ + {"host": ["*"], "config": {}}, + { + "host": ["*"], + "config": {"identityfile": ["~/.ssh/id_rsa"], "user": "robey"}, + }, + { + "host": ["*.example.com"], + "config": {"user": "bjork", "port": "3333"}, + }, + {"host": ["*"], "config": {"crazy": "something dumb"}}, + { + "host": ["spoo.example.com"], + "config": {"crazy": "something else"}, + }, + ] + assert self.config._config == expected + + @mark.parametrize( + "host,values", + ( + ( + "irc.danger.com", + { + "crazy": "something dumb", + "hostname": "irc.danger.com", + "user": "robey", + }, + ), + ( + "irc.example.com", + { + "crazy": "something dumb", + "hostname": "irc.example.com", + "user": "robey", + "port": "3333", + }, + ), + ( + "spoo.example.com", + { + "crazy": "something dumb", + "hostname": "spoo.example.com", + "user": "robey", + "port": "3333", + }, + ), + ), + ) + def test_host_config(self, host, values): + expected = dict( + values, hostname=host, identityfile=[expanduser("~/.ssh/id_rsa")] + ) + assert self.config.lookup(host) == expected + + def test_fabric_issue_33(self): + config = SSHConfig.from_text( + """ +Host www13.* + Port 22 + +Host *.example.com + Port 2222 + +Host * + Port 3333 +""" + ) + host = "www13.example.com" + expected = {"hostname": host, "port": "22"} + assert config.lookup(host) == expected + + def test_proxycommand_config_equals_parsing(self): + """ + ProxyCommand should not split on equals signs within the value. 
+ """ + config = SSHConfig.from_text( + """ +Host space-delimited + ProxyCommand foo bar=biz baz + +Host equals-delimited + ProxyCommand=foo bar=biz baz +""" + ) + for host in ("space-delimited", "equals-delimited"): + value = config.lookup(host)["proxycommand"] + assert value == "foo bar=biz baz" + + def test_proxycommand_interpolation(self): + """ + ProxyCommand should perform interpolation on the value + """ + config = SSHConfig.from_text( + """ +Host specific + Port 37 + ProxyCommand host %h port %p lol + +Host portonly + Port 155 + +Host * + Port 25 + ProxyCommand host %h port %p +""" + ) + for host, val in ( + ("foo.com", "host foo.com port 25"), + ("specific", "host specific port 37 lol"), + ("portonly", "host portonly port 155"), + ): + assert config.lookup(host)["proxycommand"] == val + + def test_proxycommand_tilde_expansion(self): + """ + Tilde (~) should be expanded inside ProxyCommand + """ + config = SSHConfig.from_text( + """ +Host test + ProxyCommand ssh -F ~/.ssh/test_config bastion nc %h %p +""" + ) + expected = "ssh -F {}/.ssh/test_config bastion nc test 22".format( + expanduser("~") + ) + got = config.lookup("test")["proxycommand"] + assert got == expected + + @patch("paramiko.config.getpass") + def test_controlpath_token_expansion(self, getpass, socket): + getpass.getuser.return_value = "gandalf" + config = SSHConfig.from_text( + """ +Host explicit_user + User root + ControlPath user %u remoteuser %r + +Host explicit_host + HostName ohai + ControlPath remoteuser %r host %h orighost %n + +Host hashbrowns + ControlPath %C + """ + ) + result = config.lookup("explicit_user")["controlpath"] + # Remote user is User val, local user is User val + assert result == "user gandalf remoteuser root" + result = config.lookup("explicit_host")["controlpath"] + # Remote user falls back to local user; host and orighost may differ + assert result == "remoteuser gandalf host ohai orighost explicit_host" + # Supports %C + result = config.lookup("hashbrowns")["controlpath"] + assert result == "a438e7dbf5308b923aba9db8fe2ca63447ac8688" + + def test_negation(self): + config = SSHConfig.from_text( + """ +Host www13.* !*.example.com + Port 22 + +Host *.example.com !www13.* + Port 2222 + +Host www13.* + Port 8080 + +Host * + Port 3333 +""" + ) + host = "www13.example.com" + expected = {"hostname": host, "port": "8080"} + assert config.lookup(host) == expected + + def test_proxycommand(self): + config = SSHConfig.from_text( + """ +Host proxy-with-equal-divisor-and-space +ProxyCommand = foo=bar + +Host proxy-with-equal-divisor-and-no-space +ProxyCommand=foo=bar + +Host proxy-without-equal-divisor +ProxyCommand foo=bar:%h-%p +""" + ) + for host, values in { + "proxy-with-equal-divisor-and-space": { + "hostname": "proxy-with-equal-divisor-and-space", + "proxycommand": "foo=bar", + }, + "proxy-with-equal-divisor-and-no-space": { + "hostname": "proxy-with-equal-divisor-and-no-space", + "proxycommand": "foo=bar", + }, + "proxy-without-equal-divisor": { + "hostname": "proxy-without-equal-divisor", + "proxycommand": "foo=bar:proxy-without-equal-divisor-22", + }, + }.items(): + + assert config.lookup(host) == values + + @patch("paramiko.config.getpass") + def test_identityfile(self, getpass, socket): + getpass.getuser.return_value = "gandalf" + config = SSHConfig.from_text( + """ +IdentityFile id_dsa0 + +Host * +IdentityFile id_dsa1 + +Host dsa2 +IdentityFile id_dsa2 + +Host dsa2* +IdentityFile id_dsa22 + +Host hashbrowns +IdentityFile %C +""" + ) + for host, values in { + "foo": {"hostname": "foo", 
"identityfile": ["id_dsa0", "id_dsa1"]}, + "dsa2": { + "hostname": "dsa2", + "identityfile": ["id_dsa0", "id_dsa1", "id_dsa2", "id_dsa22"], + }, + "dsa22": { + "hostname": "dsa22", + "identityfile": ["id_dsa0", "id_dsa1", "id_dsa22"], + }, + "hashbrowns": { + "hostname": "hashbrowns", + "identityfile": [ + "id_dsa0", + "id_dsa1", + "a438e7dbf5308b923aba9db8fe2ca63447ac8688", + ], + }, + }.items(): + assert config.lookup(host) == values + + def test_config_addressfamily_and_lazy_fqdn(self): + """ + Ensure the code path honoring non-'all' AddressFamily doesn't asplode + """ + config = SSHConfig.from_text( + """ +AddressFamily inet +IdentityFile something_%l_using_fqdn +""" + ) + assert config.lookup( + "meh" + ) # will die during lookup() if bug regresses + + def test_config_dos_crlf_succeeds(self): + config = SSHConfig.from_text( + """ +Host abcqwerty\r\nHostName 127.0.0.1\r\n +""" + ) + assert config.lookup("abcqwerty")["hostname"] == "127.0.0.1" + + def test_get_hostnames(self): + expected = {"*", "*.example.com", "spoo.example.com"} + assert self.config.get_hostnames() == expected + + def test_quoted_host_names(self): + config = SSHConfig.from_text( + """ +Host "param pam" param "pam" + Port 1111 + +Host "param2" + Port 2222 + +Host param3 parara + Port 3333 + +Host param4 "p a r" "p" "par" para + Port 4444 +""" + ) + res = { + "param pam": {"hostname": "param pam", "port": "1111"}, + "param": {"hostname": "param", "port": "1111"}, + "pam": {"hostname": "pam", "port": "1111"}, + "param2": {"hostname": "param2", "port": "2222"}, + "param3": {"hostname": "param3", "port": "3333"}, + "parara": {"hostname": "parara", "port": "3333"}, + "param4": {"hostname": "param4", "port": "4444"}, + "p a r": {"hostname": "p a r", "port": "4444"}, + "p": {"hostname": "p", "port": "4444"}, + "par": {"hostname": "par", "port": "4444"}, + "para": {"hostname": "para", "port": "4444"}, + } + for host, values in res.items(): + assert config.lookup(host) == values + + def test_quoted_params_in_config(self): + config = SSHConfig.from_text( + """ +Host "param pam" param "pam" + IdentityFile id_rsa + +Host "param2" + IdentityFile "test rsa key" + +Host param3 parara + IdentityFile id_rsa + IdentityFile "test rsa key" +""" + ) + res = { + "param pam": {"hostname": "param pam", "identityfile": ["id_rsa"]}, + "param": {"hostname": "param", "identityfile": ["id_rsa"]}, + "pam": {"hostname": "pam", "identityfile": ["id_rsa"]}, + "param2": {"hostname": "param2", "identityfile": ["test rsa key"]}, + "param3": { + "hostname": "param3", + "identityfile": ["id_rsa", "test rsa key"], + }, + "parara": { + "hostname": "parara", + "identityfile": ["id_rsa", "test rsa key"], + }, + } + for host, values in res.items(): + assert config.lookup(host) == values + + def test_quoted_host_in_config(self): + conf = SSHConfig() + correct_data = { + "param": ["param"], + '"param"': ["param"], + "param pam": ["param", "pam"], + '"param" "pam"': ["param", "pam"], + '"param" pam': ["param", "pam"], + 'param "pam"': ["param", "pam"], + 'param "pam" p': ["param", "pam", "p"], + '"param" pam "p"': ["param", "pam", "p"], + '"pa ram"': ["pa ram"], + '"pa ram" pam': ["pa ram", "pam"], + 'param "p a m"': ["param", "p a m"], + } + incorrect_data = ['param"', '"param', 'param "pam', 'param "pam" "p a'] + for host, values in correct_data.items(): + assert conf._get_hosts(host) == values + for host in incorrect_data: + with raises(ConfigParseError): + conf._get_hosts(host) + + def test_invalid_line_format_excepts(self): + with raises(ConfigParseError): + 
load_config("invalid") + + def test_proxycommand_none_issue_418(self): + config = SSHConfig.from_text( + """ +Host proxycommand-standard-none + ProxyCommand None + +Host proxycommand-with-equals-none + ProxyCommand=None +""" + ) + for host, values in { + "proxycommand-standard-none": { + "hostname": "proxycommand-standard-none" + }, + "proxycommand-with-equals-none": { + "hostname": "proxycommand-with-equals-none" + }, + }.items(): + + assert config.lookup(host) == values + + def test_proxycommand_none_masking(self): + # Re: https://github.com/paramiko/paramiko/issues/670 + config = SSHConfig.from_text( + """ +Host specific-host + ProxyCommand none + +Host other-host + ProxyCommand other-proxy + +Host * + ProxyCommand default-proxy +""" + ) + # When bug is present, the full stripping-out of specific-host's + # ProxyCommand means it actually appears to pick up the default + # ProxyCommand value instead, due to cascading. It should (for + # backwards compatibility reasons in 1.x/2.x) appear completely blank, + # as if the host had no ProxyCommand whatsoever. + # Threw another unrelated host in there just for sanity reasons. + assert "proxycommand" not in config.lookup("specific-host") + assert config.lookup("other-host")["proxycommand"] == "other-proxy" + cmd = config.lookup("some-random-host")["proxycommand"] + assert cmd == "default-proxy" + + def test_hostname_tokenization(self): + result = load_config("hostname-tokenized").lookup("whatever") + assert result["hostname"] == "prefix.whatever" + + +class TestSSHConfigDict(object): + def test_SSHConfigDict_construct_empty(self): + assert not SSHConfigDict() + + def test_SSHConfigDict_construct_from_list(self): + assert SSHConfigDict([(1, 2)])[1] == 2 + + def test_SSHConfigDict_construct_from_dict(self): + assert SSHConfigDict({1: 2})[1] == 2 + + @mark.parametrize("true_ish", ("yes", "YES", "Yes", True)) + def test_SSHConfigDict_as_bool_true_ish(self, true_ish): + assert SSHConfigDict({"key": true_ish}).as_bool("key") is True + + @mark.parametrize("false_ish", ("no", "NO", "No", False)) + def test_SSHConfigDict_as_bool(self, false_ish): + assert SSHConfigDict({"key": false_ish}).as_bool("key") is False + + @mark.parametrize("int_val", ("42", 42)) + def test_SSHConfigDict_as_int(self, int_val): + assert SSHConfigDict({"key": int_val}).as_int("key") == 42 + + @mark.parametrize("non_int", ("not an int", None, object())) + def test_SSHConfigDict_as_int_failures(self, non_int): + conf = SSHConfigDict({"key": non_int}) + + try: + int(non_int) + except Exception as e: + exception_type = type(e) + + with raises(exception_type): + conf.as_int("key") + + def test_SSHConfig_host_dicts_are_SSHConfigDict_instances(self): + config = SSHConfig.from_text( + """ +Host *.example.com + Port 2222 + +Host * + Port 3333 +""" + ) + assert config.lookup("foo.example.com").as_int("port") == 2222 + + def test_SSHConfig_wildcard_host_dicts_are_SSHConfigDict_instances(self): + config = SSHConfig.from_text( + """ +Host *.example.com + Port 2222 + +Host * + Port 3333 +""" + ) + assert config.lookup("anything-else").as_int("port") == 3333 + + +class TestHostnameCanonicalization(object): + # NOTE: this class uses on-disk configs, and ones with real (at time of + # writing) DNS names, so that one can easily test OpenSSH's behavior using + # "ssh -F path/to/file.config -G ". 
+ + def test_off_by_default(self, socket): + result = load_config("basic").lookup("www") + assert result["hostname"] == "www" + assert "user" not in result + assert not socket.gethostbyname.called + + def test_explicit_no_same_as_default(self, socket): + result = load_config("no-canon").lookup("www") + assert result["hostname"] == "www" + assert "user" not in result + assert not socket.gethostbyname.called + + @mark.parametrize( + "config_name", + ("canon", "canon-always", "canon-local", "canon-local-always"), + ) + def test_canonicalization_base_cases(self, socket, config_name): + result = load_config(config_name).lookup("www") + assert result["hostname"] == "www.paramiko.org" + assert result["user"] == "rando" + socket.gethostbyname.assert_called_once_with("www.paramiko.org") + + def test_uses_getaddrinfo_when_AddressFamily_given(self, socket): + # Undo default 'always fails' mock + socket.getaddrinfo.side_effect = None + socket.getaddrinfo.return_value = [True] # just need 1st value truthy + result = load_config("canon-ipv4").lookup("www") + assert result["hostname"] == "www.paramiko.org" + assert result["user"] == "rando" + assert not socket.gethostbyname.called + gai_args = socket.getaddrinfo.call_args[0] + assert gai_args[0] == "www.paramiko.org" + assert gai_args[2] is socket.AF_INET # Mocked, but, still useful + + @mark.skip + def test_empty_CanonicalDomains_canonicalizes_despite_noop(self, socket): + # Confirmed this is how OpenSSH behaves as well. Bit silly, but. + # TODO: this requires modifying SETTINGS_REGEX, which is a mite scary + # (honestly I'd prefer to move to a real parser lib anyhow) and since + # this is a very dumb corner case, it's marked skip for now. + result = load_config("empty-canon").lookup("www") + assert result["hostname"] == "www" # no paramiko.org + assert "user" not in result # did not discover canonicalized block + + def test_CanonicalDomains_may_be_set_to_space_separated_list(self, socket): + # Test config has a bogus domain, followed by paramiko.org + socket.gethostbyname.side_effect = [socket.gaierror, True] + result = load_config("multi-canon-domains").lookup("www") + assert result["hostname"] == "www.paramiko.org" + assert result["user"] == "rando" + assert [x[0][0] for x in socket.gethostbyname.call_args_list] == [ + "www.not-a-real-tld", + "www.paramiko.org", + ] + + def test_canonicalization_applies_to_single_dot_by_default(self, socket): + result = load_config("deep-canon").lookup("sub.www") + assert result["hostname"] == "sub.www.paramiko.org" + assert result["user"] == "deep" + + def test_canonicalization_not_applied_to_two_dots_by_default(self, socket): + result = load_config("deep-canon").lookup("subber.sub.www") + assert result["hostname"] == "subber.sub.www" + assert "user" not in result + + def test_hostname_depth_controllable_with_max_dots_directive(self, socket): + # This config sets MaxDots of 2, so now canonicalization occurs + result = load_config("deep-canon-maxdots").lookup("subber.sub.www") + assert result["hostname"] == "subber.sub.www.paramiko.org" + assert result["user"] == "deeper" + + def test_max_dots_may_be_zero(self, socket): + result = load_config("zero-maxdots").lookup("sub.www") + assert result["hostname"] == "sub.www" + assert "user" not in result + + def test_fallback_yes_does_not_canonicalize_or_error(self, socket): + socket.gethostbyname.side_effect = socket.gaierror + result = load_config("fallback-yes").lookup("www") + assert result["hostname"] == "www" + assert "user" not in result + + def 
test_fallback_no_causes_errors_for_unresolvable_names(self, socket): + socket.gethostbyname.side_effect = socket.gaierror + with raises(CouldNotCanonicalize) as info: + load_config("fallback-no").lookup("doesnotexist") + assert str(info.value) == "doesnotexist" + + def test_identityfile_continues_being_appended_to(self, socket): + result = load_config("canon").lookup("www") + assert result["identityfile"] == ["base.key", "canonicalized.key"] + + +@mark.skip +class TestCanonicalizationOfCNAMEs(object): + def test_permitted_cnames_may_be_one_to_one_mapping(self): + # CanonicalizePermittedCNAMEs *.foo.com:*.bar.com + pass + + def test_permitted_cnames_may_be_one_to_many_mapping(self): + # CanonicalizePermittedCNAMEs *.foo.com:*.bar.com,*.biz.com + pass + + def test_permitted_cnames_may_be_many_to_one_mapping(self): + # CanonicalizePermittedCNAMEs *.foo.com,*.bar.com:*.biz.com + pass + + def test_permitted_cnames_may_be_many_to_many_mapping(self): + # CanonicalizePermittedCNAMEs *.foo.com,*.bar.com:*.biz.com,*.baz.com + pass + + def test_permitted_cnames_may_be_multiple_mappings(self): + # CanonicalizePermittedCNAMEs *.foo.com,*.bar.com *.biz.com:*.baz.com + pass + + def test_permitted_cnames_may_be_multiple_complex_mappings(self): + # Same as prev but with multiple patterns on both ends in both args + pass + + +class TestMatchAll(object): + def test_always_matches(self): + result = load_config("match-all").lookup("general") + assert result["user"] == "awesome" + + def test_may_not_mix_with_non_canonical_keywords(self): + for config in ("match-all-and-more", "match-all-and-more-before"): + with raises(ConfigParseError): + load_config(config).lookup("whatever") + + def test_may_come_after_canonical(self, socket): + result = load_config("match-all-after-canonical").lookup("www") + assert result["user"] == "awesome" + + def test_may_not_come_before_canonical(self, socket): + with raises(ConfigParseError): + load_config("match-all-before-canonical") + + def test_after_canonical_not_loaded_when_non_canonicalized(self, socket): + result = load_config("match-canonical-no").lookup("a-host") + assert "user" not in result + + +def _expect(success_on): + """ + Returns a side_effect-friendly Invoke success result for given command(s). + + Ensures that any other commands fail; this is useful for testing 'Match + exec' because it means all other such clauses under test act like no-ops. + + :param success_on: + Single string or list of strings, noting commands that should appear to + succeed. + """ + if isinstance(success_on, string_types): + success_on = [success_on] + + def inner(command, *args, **kwargs): + # Sanity checking - we always expect that invoke.run is called with + # these. + assert kwargs.get("hide", None) == "stdout" + assert kwargs.get("warn", None) is True + # Fake exit + exit = 0 if command in success_on else 1 + return Result(exited=exit) + + return inner + + +class TestMatchExec(object): + @patch("paramiko.config.invoke", new=None) + @patch("paramiko.config.invoke_import_error", new=ImportError("meh")) + def test_raises_invoke_ImportErrors_at_runtime(self): + # Not an ideal test, but I don't know of a non-bad way to fake out + # module-time ImportErrors. So we mock the symptoms. Meh! 
+ with raises(ImportError) as info: + load_config("match-exec").lookup("oh-noes") + assert str(info.value) == "meh" + + @patch("paramiko.config.invoke.run") + @mark.parametrize( + "cmd,user", + [ + ("unquoted", "rando"), + ("quoted", "benjamin"), + ("quoted spaced", "neil"), + ], + ) + def test_accepts_single_possibly_quoted_argument(self, run, cmd, user): + run.side_effect = _expect(cmd) + result = load_config("match-exec").lookup("whatever") + assert result["user"] == user + + @patch("paramiko.config.invoke.run") + def test_does_not_match_nonzero_exit_codes(self, run): + # Nothing will succeed -> no User ever gets loaded + run.return_value = Result(exited=1) + result = load_config("match-exec").lookup("whatever") + assert "user" not in result + + @patch("paramiko.config.getpass") + @patch("paramiko.config.invoke.run") + def test_tokenizes_argument(self, run, getpass, socket): + getpass.getuser.return_value = "gandalf" + # Actual exec value is "%C %d %h %L %l %n %p %r %u" + parts = ( + "bf5ba06778434a9384ee4217e462f64888bd0cd2", + expanduser("~"), + "configured", + "local", + "some.fake.fqdn", + "target", + "22", + "intermediate", + "gandalf", + ) + run.side_effect = _expect(" ".join(parts)) + result = load_config("match-exec").lookup("target") + assert result["port"] == "1337" + + @patch("paramiko.config.invoke.run") + def test_works_with_canonical(self, run, socket): + # Ensure both stanzas' exec components appear to match + run.side_effect = _expect(["uncanonicalized", "canonicalized"]) + result = load_config("match-exec-canonical").lookup("who-cares") + # Prove both config values got loaded up, across the two passes + assert result["user"] == "defenseless" + assert result["port"] == "8007" + + @patch("paramiko.config.invoke.run") + def test_may_be_negated(self, run): + run.side_effect = _expect("this succeeds") + result = load_config("match-exec-negation").lookup("so-confusing") + # If negation did not work, the first of the two Match exec directives + # would have set User to 'nope' (and/or the second would have NOT set + # User to 'yup') + assert result["user"] == "yup" + + def test_requires_an_argument(self): + with raises(ConfigParseError): + load_config("match-exec-no-arg") + + @patch("paramiko.config.invoke.run") + def test_works_with_tokenized_hostname(self, run): + run.side_effect = _expect("ping target") + result = load_config("hostname-exec-tokenized").lookup("target") + assert result["hostname"] == "pingable.target" + + +class TestMatchHost(object): + def test_matches_target_name_when_no_hostname(self): + result = load_config("match-host").lookup("target") + assert result["user"] == "rand" + + def test_matches_hostname_from_global_setting(self): + # Also works for ones set in regular Host stanzas + result = load_config("match-host-name").lookup("anything") + assert result["user"] == "silly" + + def test_matches_hostname_from_earlier_match(self): + # Corner case: one Match matches original host, sets HostName, + # subsequent Match matches the latter. 
+ result = load_config("match-host-from-match").lookup("original-host") + assert result["user"] == "inner" + + def test_may_be_globbed(self): + result = load_config("match-host-glob-list").lookup("whatever") + assert result["user"] == "matrim" + + def test_may_be_comma_separated_list(self): + for target in ("somehost", "someotherhost"): + result = load_config("match-host-glob-list").lookup(target) + assert result["user"] == "thom" + + def test_comma_separated_list_may_have_internal_negation(self): + conf = load_config("match-host-glob-list") + assert conf.lookup("good")["user"] == "perrin" + assert "user" not in conf.lookup("goof") + + def test_matches_canonicalized_name(self, socket): + # Without 'canonical' explicitly declared, mind. + result = load_config("match-host-canonicalized").lookup("www") + assert result["user"] == "rand" + + def test_works_with_canonical_keyword(self, socket): + # NOTE: distinct from 'happens to be canonicalized' above + result = load_config("match-host-canonicalized").lookup("docs") + assert result["user"] == "eric" + + def test_may_be_negated(self): + conf = load_config("match-host-negated") + assert conf.lookup("docs")["user"] == "jeff" + assert "user" not in conf.lookup("www") + + def test_requires_an_argument(self): + with raises(ConfigParseError): + load_config("match-host-no-arg") + + +class TestMatchOriginalHost(object): + def test_matches_target_host_not_hostname(self): + result = load_config("match-orighost").lookup("target") + assert result["hostname"] == "bogus" + assert result["user"] == "tuon" + + def test_matches_target_host_not_canonicalized_name(self, socket): + result = load_config("match-orighost-canonical").lookup("www") + assert result["hostname"] == "www.paramiko.org" + assert result["user"] == "tuon" + + def test_may_be_globbed(self): + result = load_config("match-orighost").lookup("whatever") + assert result["user"] == "matrim" + + def test_may_be_comma_separated_list(self): + for target in ("comma", "separated"): + result = load_config("match-orighost").lookup(target) + assert result["user"] == "chameleon" + + def test_comma_separated_list_may_have_internal_negation(self): + result = load_config("match-orighost").lookup("nope") + assert "user" not in result + + def test_may_be_negated(self): + result = load_config("match-orighost").lookup("docs") + assert result["user"] == "thom" + + def test_requires_an_argument(self): + with raises(ConfigParseError): + load_config("match-orighost-no-arg") + + +class TestMatchUser(object): + def test_matches_configured_username(self): + result = load_config("match-user-explicit").lookup("anything") + assert result["hostname"] == "dumb" + + @patch("paramiko.config.getpass.getuser") + def test_matches_local_username_by_default(self, getuser): + getuser.return_value = "gandalf" + result = load_config("match-user").lookup("anything") + assert result["hostname"] == "gondor" + + @patch("paramiko.config.getpass.getuser") + def test_may_be_globbed(self, getuser): + for user in ("bilbo", "bombadil"): + getuser.return_value = user + result = load_config("match-user").lookup("anything") + assert result["hostname"] == "shire" + + @patch("paramiko.config.getpass.getuser") + def test_may_be_comma_separated_list(self, getuser): + for user in ("aragorn", "frodo"): + getuser.return_value = user + result = load_config("match-user").lookup("anything") + assert result["hostname"] == "moria" + + @patch("paramiko.config.getpass.getuser") + def test_comma_separated_list_may_have_internal_negation(self, getuser): + 
getuser.return_value = "legolas" + result = load_config("match-user").lookup("anything") + assert "port" not in result + getuser.return_value = "gimli" + result = load_config("match-user").lookup("anything") + assert result["port"] == "7373" + + @patch("paramiko.config.getpass.getuser") + def test_may_be_negated(self, getuser): + getuser.return_value = "saruman" + result = load_config("match-user").lookup("anything") + assert result["hostname"] == "mordor" + + def test_requires_an_argument(self): + with raises(ConfigParseError): + load_config("match-user-no-arg") + + +# NOTE: highly derivative of previous suite due to the former's use of +# localuser fallback. Doesn't seem worth conflating/refactoring right now. +class TestMatchLocalUser(object): + @patch("paramiko.config.getpass.getuser") + def test_matches_local_username(self, getuser): + getuser.return_value = "gandalf" + result = load_config("match-localuser").lookup("anything") + assert result["hostname"] == "gondor" + + @patch("paramiko.config.getpass.getuser") + def test_may_be_globbed(self, getuser): + for user in ("bilbo", "bombadil"): + getuser.return_value = user + result = load_config("match-localuser").lookup("anything") + assert result["hostname"] == "shire" + + @patch("paramiko.config.getpass.getuser") + def test_may_be_comma_separated_list(self, getuser): + for user in ("aragorn", "frodo"): + getuser.return_value = user + result = load_config("match-localuser").lookup("anything") + assert result["hostname"] == "moria" + + @patch("paramiko.config.getpass.getuser") + def test_comma_separated_list_may_have_internal_negation(self, getuser): + getuser.return_value = "legolas" + result = load_config("match-localuser").lookup("anything") + assert "port" not in result + getuser.return_value = "gimli" + result = load_config("match-localuser").lookup("anything") + assert result["port"] == "7373" + + @patch("paramiko.config.getpass.getuser") + def test_may_be_negated(self, getuser): + getuser.return_value = "saruman" + result = load_config("match-localuser").lookup("anything") + assert result["hostname"] == "mordor" + + def test_requires_an_argument(self): + with raises(ConfigParseError): + load_config("match-localuser-no-arg") + + +class TestComplexMatching(object): + # NOTE: this is still a cherry-pick of a few levels of complexity, there's + # no point testing literally all possible combinations. 
+ + def test_originalhost_host(self): + result = load_config("match-complex").lookup("target") + assert result["hostname"] == "bogus" + assert result["user"] == "rand" + + @patch("paramiko.config.getpass.getuser") + def test_originalhost_localuser(self, getuser): + getuser.return_value = "rando" + result = load_config("match-complex").lookup("remote") + assert result["user"] == "calrissian" + + @patch("paramiko.config.getpass.getuser") + def test_everything_but_all(self, getuser): + getuser.return_value = "rando" + result = load_config("match-complex").lookup("www") + assert result["port"] == "7777" + + @patch("paramiko.config.getpass.getuser") + def test_everything_but_all_with_some_negated(self, getuser): + getuser.return_value = "rando" + result = load_config("match-complex").lookup("docs") + assert result["port"] == "1234" + + def test_negated_canonical(self, socket): + # !canonical in a config that is not canonicalized - does match + result = load_config("match-canonical-no").lookup("specific") + assert result["user"] == "overload" + # !canonical in a config that is canonicalized - does NOT match + result = load_config("match-canonical-yes").lookup("www") + assert result["user"] == "hidden" diff --git a/tests/test_dss_openssh.key b/tests/test_dss_openssh.key new file mode 100644 index 000000000..2a9f8922a --- /dev/null +++ b/tests/test_dss_openssh.key @@ -0,0 +1,22 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAACmFlczI1Ni1jdHIAAAAGYmNyeXB0AAAAGAAAABAsyq4pxL +R5sOprPDHGpvzxAAAAEAAAAAEAAAGxAAAAB3NzaC1kc3MAAACBAL8XEx7F9xuwBNles+vW +pNF+YcofrBhjX1r5QhpBe0eoYWLHRcroN6lxwCdGYRfgOoRjTncBiixQX/uUxAY96zDh3i +r492s2BcJt4ihvNn/AY0I0OTuX/2IwGk9CGzafjaeZNVYxMa8lcVt0hSOTjkPQ7gVuk6bJ +zMInvie+VWKLAAAAFQDUgYdY+rhR0SkKbC09BS/SIHcB+wAAAIB44+4zpCNcd0CGvZlowH +99zyPX8uxQtmTLQFuR2O8O0FgVVuCdDgD0D9W8CLOp32oatpM0jyyN89EdvSWzjHzZJ+L6 +H1FtZps7uhpDFWHdva1R25vyGecLMUuXjo5t/D7oCDih+HwHoSAxoi0QvsPd8/qqHQVznN +JKtR6thUpXEwAAAIAG4DCBjbgTTgpBw0egRkJwBSz0oTt+1IcapNU2jA6N8urMSk9YXHEQ +HKN68BAF3YJ59q2Ujv3LOXmBqGd1T+kzwUszfMlgzq8MMu19Yfzse6AIK1Agn1Vj6F7YXL +sXDN+T4KszX5+FJa7t/Zsp3nALWy6l0f4WKivEF5Y2QpEFcQAAAgCH6XUl1hYWB6kgCSHV +a4C+vQHrgFNgNwEQnE074LXHXlAhxC+Dm8XTGqVPX1KRPWzadq9/+v6pqLFqiRueB86uRb +J5WtAbUs3WwxAaC5Mi+mn42MBfL9PIwWPWCvstrAq9Nyj3EBMeX3XFLxN3RuGXIQnY/5rF +f5hriUVxhWDQGIVbBKhkpn7Geqg6nLpn7iqQhzFmFGjPmAdrllgdVGJRLyIN6BRsaltDdy +vxufkvGzKudvQ85QvsaoFJQ6K1d0S7907pexvxmWpcO7zchXb6i09BITWOAKIcHpVkbNQw ++8pzSdpggsAwCRbfk/Jkezz8sXVUCfmmJ23NFUw04/0ZbilCADRsUaPfafgVPeDznBnuCm +tfXa4JSrVUvPdwoex3SKZmYsFXwsuOEQnFkhUGHfWwTbmOmxzy6dtC24KYhnWG5OGFVJXh +3B8jQJGGs2ANfusI/Z0o15tAnQy5fqsLf9TT3RX7RG2ujIiDBsU+A1g//IXmSxxkUOQMZs +v+cMI8KfODAXmQtB30+yAgoV03Zb/bdptv+HqPT4eeecstJUxzEGYADt1mDq3uV7fQbNmo +80bppU52JjztrJb7hBmXsXHPRRK6spQ1FCatqvu1ggZeXZpEifNsHeqCljt87ueXsQsORY +pvhLzjTbTKZmjLDPuB+GxUNLEKh1ZNyAqKng== +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/test_ecdsa_384_openssh.key b/tests/test_ecdsa_384_openssh.key new file mode 100644 index 000000000..8a160ce28 --- /dev/null +++ b/tests/test_ecdsa_384_openssh.key @@ -0,0 +1,11 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAACmFlczI1Ni1jdHIAAAAGYmNyeXB0AAAAGAAAABDwIHkBEZ +75XuqQS6/7daAIAAAAEAAAAAEAAACIAAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlz +dHAzODQAAABhBIch5LXTq/L/TWsTGG6dIktxD8DIMh7EfvoRmWsks6CuNDTvFvbQNtY4QO +1mn5OXegHbS0M5DPIS++wpKGFP3suDEH08O35vZQasLNrL0tO2jyyEnzB2ZEx3PPYci811 +ygAAAOBKGxFl+JcMHjldOdTA9iwv88gxoelCwln/NATglUuyzHMLJwx53n8NLqrnHALvbz +RHjyTmjU4dbSM9o9Vjhcvq+1aipjAQg2qx825f7T4BMoKyhLBS/qTg7RfyW/h0Sbequ1wl 
+PhBfwhv0LUphRFsGdnOgrXWfZqWqxOP1WhJWIh1p+ja5va/Ii/+hD6RORQjvzbHTPJA53c +OguISImkx0vdqPuFTLyclaC3eO4Px68Ki0b8cdyivExbAWLkNOtBdIAgeO7Egbruu4O5Sn +I6bn1Kc+kZlWtO02IkwSA5DaKw== +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/test_ed25519-funky-padding.key b/tests/test_ed25519-funky-padding.key new file mode 100644 index 000000000..f178ca458 --- /dev/null +++ b/tests/test_ed25519-funky-padding.key @@ -0,0 +1,7 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW +QyNTUxOQAAACAHzPvYoDSkMVX52/CbA2M2aSBS7R0wt/9b2n5n+osNygAAAJAHZ1meB2dZ +ngAAAAtzc2gtZWQyNTUxOQAAACAHzPvYoDSkMVX52/CbA2M2aSBS7R0wt/9b2n5n+osNyg +AAAEAIyamvYUpzCovQuUtLhz+fwE4qYQo+rTuUVIX4fmTzMAfM+9igNKQxVfnb8JsDYzZp +IFLtHTC3/1vafmf6iw3KAAAADW15IGNvbW1lbnQgaXM= +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/test_ed25519-funky-padding_password.key b/tests/test_ed25519-funky-padding_password.key new file mode 100644 index 000000000..1b135d69f --- /dev/null +++ b/tests/test_ed25519-funky-padding_password.key @@ -0,0 +1,8 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAACmFlczI1Ni1jdHIAAAAGYmNyeXB0AAAAGAAAABDo3dGRlE +xKndv32nDnz2mHAAAAEAAAAAEAAAAzAAAAC3NzaC1lZDI1NTE5AAAAIDcAVH8yDxoiqj0O +rX3YTRMsnvJr+XdKJW16YQpxx8UvAAAAoI78IY+u8lYOzxAEO2N8qEVQH8b/m27yQhcSbK +q1RvvuHmql3NoQvjYQe9/om4oqE+uesNRnoQGNplBHCeroD3ZcksXhLGDhwTh577NR+NQ+ +GNYAK5Ex7Va3Xgao5HUYtBQXlXbtzY1Q+71hcOlRVNnLUDvwShdCa9o6ETIOGcZl04fbzv +Z3vC1C68G3+JMNFenAGYU+iQq0XENtpT6xAIU= +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/test_ed25519.key b/tests/test_ed25519.key new file mode 100644 index 000000000..eb9f94c20 --- /dev/null +++ b/tests/test_ed25519.key @@ -0,0 +1,8 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW +QyNTUxOQAAACB69SvZKJh/9VgSL0G27b5xVYa8nethH3IERbi0YqJDXwAAAKhjwAdrY8AH +awAAAAtzc2gtZWQyNTUxOQAAACB69SvZKJh/9VgSL0G27b5xVYa8nethH3IERbi0YqJDXw +AAAEA9tGQi2IrprbOSbDCF+RmAHd6meNSXBUQ2ekKXm4/8xnr1K9komH/1WBIvQbbtvnFV +hryd62EfcgRFuLRiokNfAAAAI2FsZXhfZ2F5bm9yQEFsZXhzLU1hY0Jvb2stQWlyLmxvY2 +FsAQI= +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/test_ed25519_password.key b/tests/test_ed25519_password.key new file mode 100644 index 000000000..d178aaaec --- /dev/null +++ b/tests/test_ed25519_password.key @@ -0,0 +1,8 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAACmFlczI1Ni1jYmMAAAAGYmNyeXB0AAAAGAAAABDaKD4ac7 +kieb+UfXaLaw68AAAAEAAAAAEAAAAzAAAAC3NzaC1lZDI1NTE5AAAAIOQn7fjND5ozMSV3 +CvbEtIdT73hWCMRjzS/lRdUDw50xAAAAsE8kLGyYBnl9ihJNqv378y6mO3SkzrDbWXOnK6 +ij0vnuTAvcqvWHAnyu6qBbplu/W2m55ZFeAItgaEcV2/V76sh/sAKlERqrLFyXylN0xoOW +NU5+zU08aTlbSKGmeNUU2xE/xfJq12U9XClIRuVUkUpYANxNPbmTRpVrbD3fgXMhK97Jrb +DEn8ca1IqMPiYmd/hpe5+tq3OxyRljXjCUFWTnqkp9VvUdzSTdSGZHsW9i +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/test_file.py b/tests/test_file.py old mode 100755 new mode 100644 index 7fab69850..2a3da74b7 --- a/tests/test_file.py +++ b/tests/test_file.py @@ -21,172 +21,215 @@ """ import unittest -from paramiko.file import BufferedFile -from paramiko.common import linefeed_byte, crlf, cr_byte import sys +from paramiko.common import linefeed_byte, crlf, cr_byte +from paramiko.file import BufferedFile +from paramiko.py3compat import BytesIO + +from .util import needs_builtin -class LoopbackFile (BufferedFile): + +class LoopbackFile(BufferedFile): """ BufferedFile object that you can write data into, and then read it back. 
""" - def __init__(self, mode='r', bufsize=-1): + + def __init__(self, mode="r", bufsize=-1): BufferedFile.__init__(self) self._set_mode(mode, bufsize) - self.buffer = bytes() + self.buffer = BytesIO() + self.offset = 0 def _read(self, size): - if len(self.buffer) == 0: - return None - if size > len(self.buffer): - size = len(self.buffer) - data = self.buffer[:size] - self.buffer = self.buffer[size:] + data = self.buffer.getvalue()[self.offset : self.offset + size] + self.offset += len(data) return data def _write(self, data): - self.buffer += data + self.buffer.write(data) return len(data) -class BufferedFileTest (unittest.TestCase): - - def test_1_simple(self): - f = LoopbackFile('r') +class BufferedFileTest(unittest.TestCase): + def test_simple(self): + f = LoopbackFile("r") try: - f.write(b'hi') - self.assertTrue(False, 'no exception on write to read-only file') + f.write(b"hi") + self.assertTrue(False, "no exception on write to read-only file") except: pass f.close() - f = LoopbackFile('w') + f = LoopbackFile("w") try: f.read(1) - self.assertTrue(False, 'no exception to read from write-only file') + self.assertTrue(False, "no exception to read from write-only file") except: pass f.close() - def test_2_readline(self): - f = LoopbackFile('r+U') - f.write(b'First line.\nSecond line.\r\nThird line.\n' + - b'Fourth line.\nFinal line non-terminated.') + def test_readline(self): + f = LoopbackFile("r+U") + f.write( + b"First line.\nSecond line.\r\nThird line.\n" + + b"Fourth line.\nFinal line non-terminated." + ) - self.assertEqual(f.readline(), 'First line.\n') + self.assertEqual(f.readline(), "First line.\n") # universal newline mode should convert this linefeed: - self.assertEqual(f.readline(), 'Second line.\n') + self.assertEqual(f.readline(), "Second line.\n") # truncated line: - self.assertEqual(f.readline(7), 'Third l') - self.assertEqual(f.readline(), 'ine.\n') + self.assertEqual(f.readline(7), "Third l") + self.assertEqual(f.readline(), "ine.\n") # newline should be detected and only the fourth line returned - self.assertEqual(f.readline(39), 'Fourth line.\n') - self.assertEqual(f.readline(), 'Final line non-terminated.') - self.assertEqual(f.readline(), '') + self.assertEqual(f.readline(39), "Fourth line.\n") + self.assertEqual(f.readline(), "Final line non-terminated.") + self.assertEqual(f.readline(), "") f.close() try: f.readline() - self.assertTrue(False, 'no exception on readline of closed file') + self.assertTrue(False, "no exception on readline of closed file") except IOError: pass self.assertTrue(linefeed_byte in f.newlines) self.assertTrue(crlf in f.newlines) self.assertTrue(cr_byte not in f.newlines) - def test_3_lf(self): + def test_lf(self): """ try to trick the linefeed detector. """ - f = LoopbackFile('r+U') - f.write(b'First line.\r') - self.assertEqual(f.readline(), 'First line.\n') - f.write(b'\nSecond.\r\n') - self.assertEqual(f.readline(), 'Second.\n') + f = LoopbackFile("r+U") + f.write(b"First line.\r") + self.assertEqual(f.readline(), "First line.\n") + f.write(b"\nSecond.\r\n") + self.assertEqual(f.readline(), "Second.\n") f.close() self.assertEqual(f.newlines, crlf) - def test_4_write(self): + def test_write(self): """ verify that write buffering is on. 
""" - f = LoopbackFile('r+', 1) - f.write(b'Complete line.\nIncomplete line.') - self.assertEqual(f.readline(), 'Complete line.\n') - self.assertEqual(f.readline(), '') - f.write('..\n') - self.assertEqual(f.readline(), 'Incomplete line...\n') + f = LoopbackFile("r+", 1) + f.write(b"Complete line.\nIncomplete line.") + self.assertEqual(f.readline(), "Complete line.\n") + self.assertEqual(f.readline(), "") + f.write("..\n") + self.assertEqual(f.readline(), "Incomplete line...\n") f.close() - def test_5_flush(self): + def test_flush(self): """ verify that flush will force a write. """ - f = LoopbackFile('r+', 512) - f.write('Not\nquite\n512 bytes.\n') - self.assertEqual(f.read(1), b'') + f = LoopbackFile("r+", 512) + f.write("Not\nquite\n512 bytes.\n") + self.assertEqual(f.read(1), b"") f.flush() - self.assertEqual(f.read(5), b'Not\nq') - self.assertEqual(f.read(10), b'uite\n512 b') - self.assertEqual(f.read(9), b'ytes.\n') - self.assertEqual(f.read(3), b'') + self.assertEqual(f.read(5), b"Not\nq") + self.assertEqual(f.read(10), b"uite\n512 b") + self.assertEqual(f.read(9), b"ytes.\n") + self.assertEqual(f.read(3), b"") f.close() - def test_6_buffering(self): + def test_buffering_flushes(self): """ verify that flushing happens automatically on buffer crossing. """ - f = LoopbackFile('r+', 16) - f.write(b'Too small.') - self.assertEqual(f.read(4), b'') - f.write(b' ') - self.assertEqual(f.read(4), b'') - f.write(b'Enough.') - self.assertEqual(f.read(20), b'Too small. Enough.') + f = LoopbackFile("r+", 16) + f.write(b"Too small.") + self.assertEqual(f.read(4), b"") + f.write(b" ") + self.assertEqual(f.read(4), b"") + f.write(b"Enough.") + self.assertEqual(f.read(20), b"Too small. Enough.") f.close() - def test_7_read_all(self): + def test_read_all(self): """ verify that read(-1) returns everything left in the file. """ - f = LoopbackFile('r+', 16) - f.write(b'The first thing you need to do is open your eyes. ') - f.write(b'Then, you need to close them again.\n') + f = LoopbackFile("r+", 16) + f.write(b"The first thing you need to do is open your eyes. ") + f.write(b"Then, you need to close them again.\n") s = f.read(-1) - self.assertEqual(s, b'The first thing you need to do is open your eyes. Then, you ' + - b'need to close them again.\n') + self.assertEqual( + s, + b"The first thing you need to do is open your eyes. 
Then, you " + + b"need to close them again.\n", + ) f.close() - def test_8_buffering(self): + def test_buffering_writes(self): """ verify that buffered objects can be written """ if sys.version_info[0] == 2: - f = LoopbackFile('r+', 16) - f.write(buffer(b'Too small.')) + f = LoopbackFile("r+", 16) + f.write(buffer(b"Too small.")) # noqa f.close() - def test_9_readable(self): - f = LoopbackFile('r') + def test_readable(self): + f = LoopbackFile("r") self.assertTrue(f.readable()) self.assertFalse(f.writable()) self.assertFalse(f.seekable()) f.close() - def test_A_writable(self): - f = LoopbackFile('w') + def test_writable(self): + f = LoopbackFile("w") self.assertTrue(f.writable()) self.assertFalse(f.readable()) self.assertFalse(f.seekable()) f.close() - def test_B_readinto(self): + def test_readinto(self): data = bytearray(5) - f = LoopbackFile('r+') + f = LoopbackFile("r+") f._write(b"hello") f.readinto(data) - self.assertEqual(data, b'hello') + self.assertEqual(data, b"hello") f.close() -if __name__ == '__main__': + def test_write_bad_type(self): + with LoopbackFile("wb") as f: + self.assertRaises(TypeError, f.write, object()) + + def test_write_unicode_as_binary(self): + text = u"\xa7 why is writing text to a binary file allowed?\n" + with LoopbackFile("rb+") as f: + f.write(text) + self.assertEqual(f.read(), text.encode("utf-8")) + + @needs_builtin("memoryview") + def test_write_bytearray(self): + with LoopbackFile("rb+") as f: + f.write(bytearray(12)) + self.assertEqual(f.read(), 12 * b"\0") + + @needs_builtin("buffer") + def test_write_buffer(self): + data = 3 * b"pretend giant block of data\n" + offsets = range(0, len(data), 8) + with LoopbackFile("rb+") as f: + for offset in offsets: + f.write(buffer(data, offset, 8)) # noqa + self.assertEqual(f.read(), data) + + @needs_builtin("memoryview") + def test_write_memoryview(self): + data = 3 * b"pretend giant block of data\n" + offsets = range(0, len(data), 8) + with LoopbackFile("rb+") as f: + view = memoryview(data) + for offset in offsets: + f.write(view[offset : offset + 8]) + self.assertEqual(f.read(), data) + + +if __name__ == "__main__": from unittest import main + main() diff --git a/tests/test_gssapi.py b/tests/test_gssapi.py index 96c268d9f..acdc7c821 100644 --- a/tests/test_gssapi.py +++ b/tests/test_gssapi.py @@ -22,64 +22,89 @@ Test the used APIs for GSS-API / SSPI authentication """ -import unittest import socket +from .util import needs_gssapi, KerberosTestCase, update_env + +# +# NOTE: KerberosTestCase skips all tests if it was unable to import k5test +# third-party library. That's the primary trigger for whether this module +# effectively gets run or not. See tests/util.py for other triggers (a set of +# env vars a human might have defined). +# -class GSSAPITest(unittest.TestCase): - @staticmethod - def init(hostname=None, srv_mode=False): - global krb5_mech, targ_name, server_mode - krb5_mech = "1.2.840.113554.1.2.2" - targ_name = hostname - server_mode = srv_mode - def test_1_pyasn1(self): +@needs_gssapi +class GSSAPITest(KerberosTestCase): + def setUp(self): + super(GSSAPITest, self).setUp() + # TODO: these vars should all come from os.environ or whatever the + # approved pytest method is for runtime-configuring test data. + self.krb5_mech = "1.2.840.113554.1.2.2" + self.targ_name = self.realm.hostname + self.server_mode = False + update_env(self, self.realm.env) + + def test_pyasn1(self): """ Test the used methods of pyasn1. 
""" from pyasn1.type.univ import ObjectIdentifier from pyasn1.codec.der import encoder, decoder - oid = encoder.encode(ObjectIdentifier(krb5_mech)) + + oid = encoder.encode(ObjectIdentifier(self.krb5_mech)) mech, __ = decoder.decode(oid) - self.assertEquals(krb5_mech, mech.__str__()) + self.assertEquals(self.krb5_mech, mech.__str__()) - def test_2_gssapi_sspi(self): + def _gssapi_sspi_test(self): """ Test the used methods of python-gssapi or sspi, sspicon from pywin32. """ - _API = "MIT" try: import gssapi + + if ( + hasattr(gssapi, "__title__") + and gssapi.__title__ == "python-gssapi" + ): + _API = "PYTHON-GSSAPI-OLD" + else: + _API = "PYTHON-GSSAPI-NEW" except ImportError: import sspicon import sspi + _API = "SSPI" c_token = None gss_ctxt_status = False mic_msg = b"G'day Mate!" - if _API == "MIT": - if server_mode: - gss_flags = (gssapi.C_PROT_READY_FLAG, - gssapi.C_INTEG_FLAG, - gssapi.C_MUTUAL_FLAG, - gssapi.C_DELEG_FLAG) + if _API == "PYTHON-GSSAPI-OLD": + if self.server_mode: + gss_flags = ( + gssapi.C_PROT_READY_FLAG, + gssapi.C_INTEG_FLAG, + gssapi.C_MUTUAL_FLAG, + gssapi.C_DELEG_FLAG, + ) else: - gss_flags = (gssapi.C_PROT_READY_FLAG, - gssapi.C_INTEG_FLAG, - gssapi.C_DELEG_FLAG) + gss_flags = ( + gssapi.C_PROT_READY_FLAG, + gssapi.C_INTEG_FLAG, + gssapi.C_DELEG_FLAG, + ) # Initialize a GSS-API context. ctx = gssapi.Context() ctx.flags = gss_flags - krb5_oid = gssapi.OID.mech_from_string(krb5_mech) - target_name = gssapi.Name("host@" + targ_name, - gssapi.C_NT_HOSTBASED_SERVICE) - gss_ctxt = gssapi.InitContext(peer_name=target_name, - mech_type=krb5_oid, - req_flags=ctx.flags) - if server_mode: + krb5_oid = gssapi.OID.mech_from_string(self.krb5_mech) + target_name = gssapi.Name( + "host@" + self.targ_name, gssapi.C_NT_HOSTBASED_SERVICE + ) + gss_ctxt = gssapi.InitContext( + peer_name=target_name, mech_type=krb5_oid, req_flags=ctx.flags + ) + if self.server_mode: c_token = gss_ctxt.step(c_token) gss_ctxt_status = gss_ctxt.established self.assertEquals(False, gss_ctxt_status) @@ -99,20 +124,72 @@ def test_2_gssapi_sspi(self): # Build MIC mic_token = gss_ctxt.get_mic(mic_msg) - if server_mode: + if self.server_mode: # Check MIC status = gss_srv_ctxt.verify_mic(mic_msg, mic_token) self.assertEquals(0, status) + elif _API == "PYTHON-GSSAPI-NEW": + if self.server_mode: + gss_flags = ( + gssapi.RequirementFlag.protection_ready, + gssapi.RequirementFlag.integrity, + gssapi.RequirementFlag.mutual_authentication, + gssapi.RequirementFlag.delegate_to_peer, + ) + else: + gss_flags = ( + gssapi.RequirementFlag.protection_ready, + gssapi.RequirementFlag.integrity, + gssapi.RequirementFlag.delegate_to_peer, + ) + # Initialize a GSS-API context. + krb5_oid = gssapi.MechType.kerberos + target_name = gssapi.Name( + "host@" + self.targ_name, + name_type=gssapi.NameType.hostbased_service, + ) + gss_ctxt = gssapi.SecurityContext( + name=target_name, + flags=gss_flags, + mech=krb5_oid, + usage="initiate", + ) + if self.server_mode: + c_token = gss_ctxt.step(c_token) + gss_ctxt_status = gss_ctxt.complete + self.assertEquals(False, gss_ctxt_status) + # Accept a GSS-API context. 
+ gss_srv_ctxt = gssapi.SecurityContext(usage="accept") + s_token = gss_srv_ctxt.step(c_token) + gss_ctxt_status = gss_srv_ctxt.complete + self.assertNotEquals(None, s_token) + self.assertEquals(True, gss_ctxt_status) + # Establish the client context + c_token = gss_ctxt.step(s_token) + self.assertEquals(None, c_token) + else: + while not gss_ctxt.complete: + c_token = gss_ctxt.step(c_token) + self.assertNotEquals(None, c_token) + # Build MIC + mic_token = gss_ctxt.get_signature(mic_msg) + + if self.server_mode: + # Check MIC + status = gss_srv_ctxt.verify_signature(mic_msg, mic_token) + self.assertEquals(0, status) else: - gss_flags = sspicon.ISC_REQ_INTEGRITY |\ - sspicon.ISC_REQ_MUTUAL_AUTH |\ - sspicon.ISC_REQ_DELEGATE + gss_flags = ( + sspicon.ISC_REQ_INTEGRITY + | sspicon.ISC_REQ_MUTUAL_AUTH + | sspicon.ISC_REQ_DELEGATE + ) # Initialize a GSS-API context. - target_name = "host/" + socket.getfqdn(targ_name) - gss_ctxt = sspi.ClientAuth("Kerberos", - scflags=gss_flags, - targetspn=target_name) - if server_mode: + target_name = "host/" + socket.getfqdn(self.targ_name) + gss_ctxt = sspi.ClientAuth( + "Kerberos", scflags=gss_flags, targetspn=target_name + ) + if self.server_mode: error, token = gss_ctxt.authorize(c_token) c_token = token[0].Buffer self.assertEquals(0, error) @@ -133,3 +210,16 @@ def test_2_gssapi_sspi(self): error, token = gss_ctxt.authorize(c_token) c_token = token[0].Buffer self.assertNotEquals(0, error) + + def test_gssapi_sspi_client(self): + """ + Test the used methods of python-gssapi or sspi, sspicon from pywin32. + """ + self._gssapi_sspi_test() + + def test_gssapi_sspi_server(self): + """ + Test the used methods of python-gssapi or sspi, sspicon from pywin32. + """ + self.server_mode = True + self._gssapi_sspi_test() diff --git a/tests/test_hostkeys.py b/tests/test_hostkeys.py index 2bdcad9c3..41a9244f1 100644 --- a/tests/test_hostkeys.py +++ b/tests/test_hostkeys.py @@ -23,6 +23,7 @@ from binascii import hexlify import os import unittest + import paramiko from paramiko.py3compat import decodebytes @@ -53,65 +54,96 @@ 0d54U0X/NeX5QxuYR6OMJlrkQB7oiW/P/1mwjQgE=""" -class HostKeysTest (unittest.TestCase): - +class HostKeysTest(unittest.TestCase): def setUp(self): - with open('hostfile.temp', 'w') as f: + with open("hostfile.temp", "w") as f: f.write(test_hosts_file) def tearDown(self): - os.unlink('hostfile.temp') + os.unlink("hostfile.temp") - def test_1_load(self): - hostdict = paramiko.HostKeys('hostfile.temp') + def test_load(self): + hostdict = paramiko.HostKeys("hostfile.temp") self.assertEqual(2, len(hostdict)) self.assertEqual(1, len(list(hostdict.values())[0])) self.assertEqual(1, len(list(hostdict.values())[1])) - fp = hexlify(hostdict['secure.example.com']['ssh-rsa'].get_fingerprint()).upper() - self.assertEqual(b'E6684DB30E109B67B70FF1DC5C7F1363', fp) + fp = hexlify( + hostdict["secure.example.com"]["ssh-rsa"].get_fingerprint() + ).upper() + self.assertEqual(b"E6684DB30E109B67B70FF1DC5C7F1363", fp) - def test_2_add(self): - hostdict = paramiko.HostKeys('hostfile.temp') - hh = '|1|BMsIC6cUIP2zBuXR3t2LRcJYjzM=|hpkJMysjTk/+zzUUzxQEa2ieq6c=' + def test_add(self): + hostdict = paramiko.HostKeys("hostfile.temp") + hh = "|1|BMsIC6cUIP2zBuXR3t2LRcJYjzM=|hpkJMysjTk/+zzUUzxQEa2ieq6c=" key = paramiko.RSAKey(data=decodebytes(keyblob)) - hostdict.add(hh, 'ssh-rsa', key) + hostdict.add(hh, "ssh-rsa", key) self.assertEqual(3, len(list(hostdict))) - x = hostdict['foo.example.com'] - fp = hexlify(x['ssh-rsa'].get_fingerprint()).upper() - 
self.assertEqual(b'7EC91BB336CB6D810B124B1353C32396', fp) - self.assertTrue(hostdict.check('foo.example.com', key)) - - def test_3_dict(self): - hostdict = paramiko.HostKeys('hostfile.temp') - self.assertTrue('secure.example.com' in hostdict) - self.assertTrue('not.example.com' not in hostdict) - self.assertTrue('secure.example.com' in hostdict) - self.assertTrue('not.example.com' not in hostdict) - x = hostdict.get('secure.example.com', None) + x = hostdict["foo.example.com"] + fp = hexlify(x["ssh-rsa"].get_fingerprint()).upper() + self.assertEqual(b"7EC91BB336CB6D810B124B1353C32396", fp) + self.assertTrue(hostdict.check("foo.example.com", key)) + + def test_dict(self): + hostdict = paramiko.HostKeys("hostfile.temp") + self.assertTrue("secure.example.com" in hostdict) + self.assertTrue("not.example.com" not in hostdict) + self.assertTrue("secure.example.com" in hostdict) + self.assertTrue("not.example.com" not in hostdict) + x = hostdict.get("secure.example.com", None) self.assertTrue(x is not None) - fp = hexlify(x['ssh-rsa'].get_fingerprint()).upper() - self.assertEqual(b'E6684DB30E109B67B70FF1DC5C7F1363', fp) + fp = hexlify(x["ssh-rsa"].get_fingerprint()).upper() + self.assertEqual(b"E6684DB30E109B67B70FF1DC5C7F1363", fp) i = 0 for key in hostdict: i += 1 self.assertEqual(2, i) - - def test_4_dict_set(self): - hostdict = paramiko.HostKeys('hostfile.temp') + + def test_dict_set(self): + hostdict = paramiko.HostKeys("hostfile.temp") key = paramiko.RSAKey(data=decodebytes(keyblob)) key_dss = paramiko.DSSKey(data=decodebytes(keyblob_dss)) - hostdict['secure.example.com'] = { - 'ssh-rsa': key, - 'ssh-dss': key_dss - } - hostdict['fake.example.com'] = {} - hostdict['fake.example.com']['ssh-rsa'] = key - + hostdict["secure.example.com"] = {"ssh-rsa": key, "ssh-dss": key_dss} + hostdict["fake.example.com"] = {} + hostdict["fake.example.com"]["ssh-rsa"] = key + self.assertEqual(3, len(hostdict)) self.assertEqual(2, len(list(hostdict.values())[0])) self.assertEqual(1, len(list(hostdict.values())[1])) self.assertEqual(1, len(list(hostdict.values())[2])) - fp = hexlify(hostdict['secure.example.com']['ssh-rsa'].get_fingerprint()).upper() - self.assertEqual(b'7EC91BB336CB6D810B124B1353C32396', fp) - fp = hexlify(hostdict['secure.example.com']['ssh-dss'].get_fingerprint()).upper() - self.assertEqual(b'4478F0B9A23CC5182009FF755BC1D26C', fp) + fp = hexlify( + hostdict["secure.example.com"]["ssh-rsa"].get_fingerprint() + ).upper() + self.assertEqual(b"7EC91BB336CB6D810B124B1353C32396", fp) + fp = hexlify( + hostdict["secure.example.com"]["ssh-dss"].get_fingerprint() + ).upper() + self.assertEqual(b"4478F0B9A23CC5182009FF755BC1D26C", fp) + + def test_delitem(self): + hostdict = paramiko.HostKeys("hostfile.temp") + target = "happy.example.com" + hostdict[target] # will KeyError if not present + del hostdict[target] + try: + hostdict[target] + except KeyError: + pass # Good + else: + assert False, "Entry was not deleted from HostKeys on delitem!" + + def test_entry_delitem(self): + hostdict = paramiko.HostKeys("hostfile.temp") + target = "happy.example.com" + entry = hostdict[target] + key_type_list = [key_type for key_type in entry] + for key_type in key_type_list: + del entry[key_type] + + # will KeyError if not present + for key_type in key_type_list: + try: + del entry[key_type] + except KeyError: + pass # Good + else: + assert False, "Key was not deleted from Entry on delitem!" 
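The new test_delitem / test_entry_delitem cases above pin down dict-style deletion on paramiko.HostKeys and on its per-host entries. As a minimal illustrative sketch (not part of this patch; it simply reuses the tests' "hostfile.temp" fixture name and the hostnames the tests reference), the API they exercise looks roughly like this:

    from binascii import hexlify

    import paramiko

    # Load the same known_hosts-style fixture the tests write out.
    host_keys = paramiko.HostKeys("hostfile.temp")

    # HostKeys acts like a dict keyed by hostname; each value is itself
    # dict-like, keyed by key type.
    fp = hexlify(host_keys["secure.example.com"]["ssh-rsa"].get_fingerprint())
    print(fp.upper())

    # The new tests assert that deletion works at both levels:
    entry = host_keys["happy.example.com"]
    for key_type in list(entry):
        del entry[key_type]              # drop individual key types from an entry
    del host_keys["secure.example.com"]  # or drop a whole host entry at once
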
diff --git a/tests/test_kex.py b/tests/test_kex.py index 19804fbf7..b73989c20 100644 --- a/tests/test_kex.py +++ b/tests/test_kex.py @@ -20,34 +20,68 @@ Some unit tests for the key exchange protocols. """ -from binascii import hexlify +from binascii import hexlify, unhexlify import os import unittest +from mock import Mock, patch +import pytest + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric import ec + +try: + from cryptography.hazmat.primitives.asymmetric import x25519 +except ImportError: + x25519 = None + import paramiko.util from paramiko.kex_group1 import KexGroup1 +from paramiko.kex_group14 import KexGroup14SHA256 from paramiko.kex_gex import KexGex, KexGexSHA256 from paramiko import Message from paramiko.common import byte_chr +from paramiko.kex_ecdh_nist import KexNistp256 +from paramiko.kex_group16 import KexGroup16SHA512 +from paramiko.kex_curve25519 import KexCurve25519 def dummy_urandom(n): return byte_chr(0xcc) * n -class FakeKey (object): +def dummy_generate_key_pair(obj): + private_key_value = 94761803665136558137557783047955027733968423115106677159790289642479432803037 # noqa + public_key_numbers = "042bdab212fa8ba1b7c843301682a4db424d307246c7e1e6083c41d9ca7b098bf30b3d63e2ec6278488c135360456cc054b3444ecc45998c08894cbc1370f5f989" # noqa + public_key_numbers_obj = ec.EllipticCurvePublicKey.from_encoded_point( + ec.SECP256R1(), unhexlify(public_key_numbers) + ).public_numbers() + obj.P = ec.EllipticCurvePrivateNumbers( + private_value=private_key_value, public_numbers=public_key_numbers_obj + ).private_key(default_backend()) + if obj.transport.server_mode: + obj.Q_S = ec.EllipticCurvePublicKey.from_encoded_point( + ec.SECP256R1(), unhexlify(public_key_numbers) + ) + return + obj.Q_C = ec.EllipticCurvePublicKey.from_encoded_point( + ec.SECP256R1(), unhexlify(public_key_numbers) + ) + + +class FakeKey(object): def __str__(self): - return 'fake-key' + return "fake-key" def asbytes(self): - return b'fake-key' + return b"fake-key" - def sign_ssh_data(self, H): - return b'fake-sig' + def sign_ssh_data(self, H, algorithm): + return b"fake-sig" -class FakeModulusPack (object): - P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF +class FakeModulusPack(object): + P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF # noqa G = 2 def get_modulus(self, min, ask, max): @@ -55,10 +89,11 @@ def get_modulus(self, min, ask, max): class FakeTransport(object): - local_version = 'SSH-2.0-paramiko_1.0' - remote_version = 'SSH-2.0-lame' - local_kex_init = 'local-kex-init' - remote_kex_init = 'remote-kex-init' + local_version = "SSH-2.0-paramiko_1.0" + remote_version = "SSH-2.0-lame" + local_kex_init = "local-kex-init" + remote_kex_init = "remote-kex-init" + host_key_type = "fake-key" def _send_message(self, m): self._message = m @@ -86,123 +121,160 @@ def _get_modulus_pack(self): return FakeModulusPack() -class KexTest (unittest.TestCase): +class KexTest(unittest.TestCase): - K = 
14730343317708716439807310032871972459448364195094179797249681733965528989482751523943515690110179031004049109375612685505881911274101441415545039654102474376472240501616988799699744135291070488314748284283496055223852115360852283821334858541043710301057312858051901453919067023103730011648890038847384890504 + K = 14730343317708716439807310032871972459448364195094179797249681733965528989482751523943515690110179031004049109375612685505881911274101441415545039654102474376472240501616988799699744135291070488314748284283496055223852115360852283821334858541043710301057312858051901453919067023103730011648890038847384890504 # noqa def setUp(self): self._original_urandom = os.urandom os.urandom = dummy_urandom + self._original_generate_key_pair = KexNistp256._generate_key_pair + KexNistp256._generate_key_pair = dummy_generate_key_pair + + if KexCurve25519.is_available(): + static_x25519_key = x25519.X25519PrivateKey.from_private_bytes( + unhexlify( + b"2184abc7eb3e656d2349d2470ee695b570c227340c2b2863b6c9ff427af1f040" # noqa + ) + ) + mock_x25519 = Mock() + mock_x25519.generate.return_value = static_x25519_key + patcher = patch( + "paramiko.kex_curve25519.X25519PrivateKey", mock_x25519 + ) + patcher.start() + self.x25519_patcher = patcher def tearDown(self): os.urandom = self._original_urandom + KexNistp256._generate_key_pair = self._original_generate_key_pair + if hasattr(self, "x25519_patcher"): + self.x25519_patcher.stop() - def test_1_group1_client(self): + def test_group1_client(self): transport = FakeTransport() transport.server_mode = False kex = KexGroup1(transport) kex.start_kex() - x = b'1E000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4' + x = b"1E000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_group1._MSG_KEXDH_REPLY,), transport._expect) + self.assertEqual( + (paramiko.kex_group1._MSG_KEXDH_REPLY,), transport._expect + ) # fake "reply" msg = Message() - msg.add_string('fake-host-key') + msg.add_string("fake-host-key") msg.add_mpint(69) - msg.add_string('fake-sig') + msg.add_string("fake-sig") msg.rewind() kex.parse_next(paramiko.kex_group1._MSG_KEXDH_REPLY, msg) - H = b'03079780F3D3AD0B3C6DB30C8D21685F367A86D2' + H = b"03079780F3D3AD0B3C6DB30C8D21685F367A86D2" self.assertEqual(self.K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) - self.assertEqual((b'fake-host-key', b'fake-sig'), transport._verify) + self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) self.assertTrue(transport._activated) - def test_2_group1_server(self): + def test_group1_server(self): transport = FakeTransport() transport.server_mode = True kex = KexGroup1(transport) kex.start_kex() - self.assertEqual((paramiko.kex_group1._MSG_KEXDH_INIT,), transport._expect) + self.assertEqual( + (paramiko.kex_group1._MSG_KEXDH_INIT,), transport._expect + ) msg = Message() msg.add_mpint(69) msg.rewind() kex.parse_next(paramiko.kex_group1._MSG_KEXDH_INIT, msg) - H = b'B16BF34DD10945EDE84E9C1EF24A14BFDC843389' - x = 
b'1F0000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967' + H = b"B16BF34DD10945EDE84E9C1EF24A14BFDC843389" + x = b"1F0000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa self.assertEqual(self.K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) self.assertTrue(transport._activated) - def test_3_gex_client(self): + def test_gex_client(self): transport = FakeTransport() transport.server_mode = False kex = KexGex(transport) kex.start_kex() - x = b'22000004000000080000002000' + x = b"22000004000000080000002000" self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect + ) msg = Message() msg.add_mpint(FakeModulusPack.P) msg.add_mpint(FakeModulusPack.G) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP, msg) - x = b'20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4' + x = b"20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect + ) msg = Message() - msg.add_string('fake-host-key') + msg.add_string("fake-host-key") msg.add_mpint(69) - msg.add_string('fake-sig') + msg.add_string("fake-sig") msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY, msg) - H = b'A265563F2FA87F1A89BF007EE90D58BE2E4A4BD0' + H = b"A265563F2FA87F1A89BF007EE90D58BE2E4A4BD0" self.assertEqual(self.K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) - self.assertEqual((b'fake-host-key', b'fake-sig'), transport._verify) + self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) self.assertTrue(transport._activated) - def test_4_gex_old_client(self): + def test_gex_old_client(self): transport = FakeTransport() transport.server_mode = False kex = KexGex(transport) kex.start_kex(_test_old_style=True) - x = b'1E00000800' + x = b"1E00000800" self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect + ) msg = Message() msg.add_mpint(FakeModulusPack.P) msg.add_mpint(FakeModulusPack.G) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP, msg) - x = 
b'20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4' + x = b"20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect + ) msg = Message() - msg.add_string('fake-host-key') + msg.add_string("fake-host-key") msg.add_mpint(69) - msg.add_string('fake-sig') + msg.add_string("fake-sig") msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY, msg) - H = b'807F87B269EF7AC5EC7E75676808776A27D5864C' + H = b"807F87B269EF7AC5EC7E75676808776A27D5864C" self.assertEqual(self.K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) - self.assertEqual((b'fake-host-key', b'fake-sig'), transport._verify) + self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) self.assertTrue(transport._activated) - - def test_5_gex_server(self): + + def test_gex_server(self): transport = FakeTransport() transport.server_mode = True kex = KexGex(transport) kex.start_kex() - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD), transport._expect) + self.assertEqual( + ( + paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, + paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD, + ), + transport._expect, + ) msg = Message() msg.add_int(1024) @@ -210,115 +282,139 @@ def test_5_gex_server(self): msg.add_int(4096) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, msg) - x = b'1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102' + x = b"1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect + ) msg = Message() msg.add_mpint(12345) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_INIT, msg) - K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 - H = b'CE754197C21BF3452863B4F44D0B3951F12516EF' - x = 
b'210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967' + K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 # noqa + H = b"CE754197C21BF3452863B4F44D0B3951F12516EF" + x = b"210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa self.assertEqual(K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) self.assertTrue(transport._activated) - def test_6_gex_server_with_old_client(self): + def test_gex_server_with_old_client(self): transport = FakeTransport() transport.server_mode = True kex = KexGex(transport) kex.start_kex() - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD), transport._expect) + self.assertEqual( + ( + paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, + paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD, + ), + transport._expect, + ) msg = Message() msg.add_int(2048) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD, msg) - x = b'1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102' + x = b"1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect + ) msg = Message() msg.add_mpint(12345) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_INIT, msg) - K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 - H = b'B41A06B2E59043CEFC1AE16EC31F1E2D12EC455B' - x = b'210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967' + K = 
67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 # noqa + H = b"B41A06B2E59043CEFC1AE16EC31F1E2D12EC455B" + x = b"210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa self.assertEqual(K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) self.assertTrue(transport._activated) - def test_7_gex_sha256_client(self): + def test_gex_sha256_client(self): transport = FakeTransport() transport.server_mode = False kex = KexGexSHA256(transport) kex.start_kex() - x = b'22000004000000080000002000' + x = b"22000004000000080000002000" self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect + ) msg = Message() msg.add_mpint(FakeModulusPack.P) msg.add_mpint(FakeModulusPack.G) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP, msg) - x = b'20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4' + x = b"20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect + ) msg = Message() - msg.add_string('fake-host-key') + msg.add_string("fake-host-key") msg.add_mpint(69) - msg.add_string('fake-sig') + msg.add_string("fake-sig") msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY, msg) - H = b'AD1A9365A67B4496F05594AD1BF656E3CDA0851289A4C1AFF549FEAE50896DF4' + H = b"AD1A9365A67B4496F05594AD1BF656E3CDA0851289A4C1AFF549FEAE50896DF4" self.assertEqual(self.K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) - self.assertEqual((b'fake-host-key', b'fake-sig'), transport._verify) + self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) self.assertTrue(transport._activated) - def test_8_gex_sha256_old_client(self): + def test_gex_sha256_old_client(self): transport = FakeTransport() transport.server_mode = False kex = KexGexSHA256(transport) kex.start_kex(_test_old_style=True) - x = b'1E00000800' + x = b"1E00000800" self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_GROUP,), transport._expect + ) msg = Message() msg.add_mpint(FakeModulusPack.P) msg.add_mpint(FakeModulusPack.G) msg.rewind() 
kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_GROUP, msg) - x = b'20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4' + x = b"20000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D4" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_REPLY,), transport._expect + ) msg = Message() - msg.add_string('fake-host-key') + msg.add_string("fake-host-key") msg.add_mpint(69) - msg.add_string('fake-sig') + msg.add_string("fake-sig") msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REPLY, msg) - H = b'518386608B15891AE5237DEE08DCADDE76A0BCEFCE7F6DB3AD66BC41D256DFE5' + H = b"518386608B15891AE5237DEE08DCADDE76A0BCEFCE7F6DB3AD66BC41D256DFE5" self.assertEqual(self.K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) - self.assertEqual((b'fake-host-key', b'fake-sig'), transport._verify) + self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) self.assertTrue(transport._activated) - def test_9_gex_sha256_server(self): + def test_gex_sha256_server(self): transport = FakeTransport() transport.server_mode = True kex = KexGexSHA256(transport) kex.start_kex() - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD), transport._expect) + self.assertEqual( + ( + paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, + paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD, + ), + transport._expect, + ) msg = Message() msg.add_int(1024) @@ -326,47 +422,247 @@ def test_9_gex_sha256_server(self): msg.add_int(4096) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, msg) - x = b'1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102' + x = b"1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect + ) msg = Message() msg.add_mpint(12345) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_INIT, msg) - K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 - H = b'CCAC0497CF0ABA1DBF55E1A3995D17F4CC31824B0E8D95CDF8A06F169D050D80' - x = 
b'210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967' + K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 # noqa + H = b"CCAC0497CF0ABA1DBF55E1A3995D17F4CC31824B0E8D95CDF8A06F169D050D80" + x = b"210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa self.assertEqual(K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) self.assertTrue(transport._activated) - def test_10_gex_sha256_server_with_old_client(self): + def test_gex_sha256_server_with_old_client(self): transport = FakeTransport() transport.server_mode = True kex = KexGexSHA256(transport) kex.start_kex() - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD), transport._expect) + self.assertEqual( + ( + paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST, + paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD, + ), + transport._expect, + ) msg = Message() msg.add_int(2048) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_REQUEST_OLD, msg) - x = b'1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102' + x = b"1F0000008100FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFF0000000102" # noqa self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) - self.assertEqual((paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect) + self.assertEqual( + (paramiko.kex_gex._MSG_KEXDH_GEX_INIT,), transport._expect + ) msg = Message() msg.add_mpint(12345) msg.rewind() kex.parse_next(paramiko.kex_gex._MSG_KEXDH_GEX_INIT, msg) - K = 67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 - H = b'3DDD2AD840AD095E397BA4D0573972DC60F6461FD38A187CACA6615A5BC8ADBB' - x = b'210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967' + K = 
67592995013596137876033460028393339951879041140378510871612128162185209509220726296697886624612526735888348020498716482757677848959420073720160491114319163078862905400020959196386947926388406687288901564192071077389283980347784184487280885335302632305026248574716290537036069329724382811853044654824945750581 # noqa + H = b"3DDD2AD840AD095E397BA4D0573972DC60F6461FD38A187CACA6615A5BC8ADBB" + x = b"210000000866616B652D6B6579000000807E2DDB1743F3487D6545F04F1C8476092FB912B013626AB5BCEB764257D88BBA64243B9F348DF7B41B8C814A995E00299913503456983FFB9178D3CD79EB6D55522418A8ABF65375872E55938AB99A84A0B5FC8A1ECC66A7C3766E7E0F80B7CE2C9225FC2DD683F4764244B72963BBB383F529DCF0C5D17740B8A2ADBE9208D40000000866616B652D736967" # noqa + self.assertEqual(K, transport._K) + self.assertEqual(H, hexlify(transport._H).upper()) + self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) + self.assertTrue(transport._activated) + + def test_kex_nistp256_client(self): + K = 91610929826364598472338906427792435253694642563583721654249504912114314269754 # noqa + transport = FakeTransport() + transport.server_mode = False + kex = KexNistp256(transport) + kex.start_kex() + self.assertEqual( + (paramiko.kex_ecdh_nist._MSG_KEXECDH_REPLY,), transport._expect + ) + + # fake reply + msg = Message() + msg.add_string("fake-host-key") + Q_S = unhexlify( + "043ae159594ba062efa121480e9ef136203fa9ec6b6e1f8723a321c16e62b945f573f3b822258cbcd094b9fa1c125cbfe5f043280893e66863cc0cb4dccbe70210" # noqa + ) + msg.add_string(Q_S) + msg.add_string("fake-sig") + msg.rewind() + kex.parse_next(paramiko.kex_ecdh_nist._MSG_KEXECDH_REPLY, msg) + H = b"BAF7CE243A836037EB5D2221420F35C02B9AB6C957FE3BDE3369307B9612570A" + self.assertEqual(K, kex.transport._K) + self.assertEqual(H, hexlify(transport._H).upper()) + self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) + self.assertTrue(transport._activated) + + def test_kex_nistp256_server(self): + K = 91610929826364598472338906427792435253694642563583721654249504912114314269754 # noqa + transport = FakeTransport() + transport.server_mode = True + kex = KexNistp256(transport) + kex.start_kex() + self.assertEqual( + (paramiko.kex_ecdh_nist._MSG_KEXECDH_INIT,), transport._expect + ) + + # fake init + msg = Message() + Q_C = unhexlify( + "043ae159594ba062efa121480e9ef136203fa9ec6b6e1f8723a321c16e62b945f573f3b822258cbcd094b9fa1c125cbfe5f043280893e66863cc0cb4dccbe70210" # noqa + ) + H = b"2EF4957AFD530DD3F05DBEABF68D724FACC060974DA9704F2AEE4C3DE861E7CA" + msg.add_string(Q_C) + msg.rewind() + kex.parse_next(paramiko.kex_ecdh_nist._MSG_KEXECDH_INIT, msg) + self.assertEqual(K, transport._K) + self.assertTrue(transport._activated) + self.assertEqual(H, hexlify(transport._H).upper()) + + def test_kex_group14_sha256_client(self): + transport = FakeTransport() + transport.server_mode = False + kex = KexGroup14SHA256(transport) + kex.start_kex() + x = b"1E00000101009850B3A8DE3ECCD3F19644139137C93D9C11BC28ED8BE850908EE294E1D43B88B9295311EFAEF5B736A1B652EBE184CCF36CFB0681C1ED66430088FA448B83619F928E7B9592ED6160EC11D639D51C303603F930F743C646B1B67DA38A1D44598DCE6C3F3019422B898044141420E9A10C29B9C58668F7F20A40F154B2C4768FCF7A9AA7179FB6366A7167EE26DD58963E8B880A0572F641DE0A73DC74C930F7C3A0C9388553F3F8403E40CF8B95FEDB1D366596FCF3FDDEB21A0005ADA650EF1733628D807BE5ACB83925462765D9076570056E39994FB328E3108FE406275758D6BF5F32790EF15D8416BF5548164859E785DB45E7787BB0E727ADE08641ED" # noqa + self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) + self.assertEqual( + 
(paramiko.kex_group1._MSG_KEXDH_REPLY,), transport._expect + ) + + # fake "reply" + msg = Message() + msg.add_string("fake-host-key") + msg.add_mpint(69) + msg.add_string("fake-sig") + msg.rewind() + kex.parse_next(paramiko.kex_group1._MSG_KEXDH_REPLY, msg) + K = 21526936926159575624241589599003964979640840086252478029709904308461709651400109485351462666820496096345766733042945918306284902585618061272525323382142547359684512114160415969631877620660064043178086464811345023251493620331559440565662862858765724251890489795332144543057725932216208403143759943169004775947331771556537814494448612329251887435553890674764339328444948425882382475260315505741818518926349729970262019325118040559191290279100613049085709127598666890434114956464502529053036826173452792849566280474995114751780998069614898221773345705289637708545219204637224261997310181473787577166103031529148842107599 # noqa + H = b"D007C23686BE8A7737F828DC9E899F8EB5AF423F495F138437BE2529C1B8455F" + self.assertEqual(K, transport._K) + self.assertEqual(H, hexlify(transport._H).upper()) + self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) + self.assertTrue(transport._activated) + + def test_kex_group14_sha256_server(self): + transport = FakeTransport() + transport.server_mode = True + kex = KexGroup14SHA256(transport) + kex.start_kex() + self.assertEqual( + (paramiko.kex_group1._MSG_KEXDH_INIT,), transport._expect + ) + + msg = Message() + msg.add_mpint(69) + msg.rewind() + kex.parse_next(paramiko.kex_group1._MSG_KEXDH_INIT, msg) + K = 21526936926159575624241589599003964979640840086252478029709904308461709651400109485351462666820496096345766733042945918306284902585618061272525323382142547359684512114160415969631877620660064043178086464811345023251493620331559440565662862858765724251890489795332144543057725932216208403143759943169004775947331771556537814494448612329251887435553890674764339328444948425882382475260315505741818518926349729970262019325118040559191290279100613049085709127598666890434114956464502529053036826173452792849566280474995114751780998069614898221773345705289637708545219204637224261997310181473787577166103031529148842107599 # noqa + H = b"15080A19894D489ACD0DA724480E1B08E71293E07EBC25FAD10F263C00B343DC" + x = b"1F0000000866616B652D6B657900000101009850B3A8DE3ECCD3F19644139137C93D9C11BC28ED8BE850908EE294E1D43B88B9295311EFAEF5B736A1B652EBE184CCF36CFB0681C1ED66430088FA448B83619F928E7B9592ED6160EC11D639D51C303603F930F743C646B1B67DA38A1D44598DCE6C3F3019422B898044141420E9A10C29B9C58668F7F20A40F154B2C4768FCF7A9AA7179FB6366A7167EE26DD58963E8B880A0572F641DE0A73DC74C930F7C3A0C9388553F3F8403E40CF8B95FEDB1D366596FCF3FDDEB21A0005ADA650EF1733628D807BE5ACB83925462765D9076570056E39994FB328E3108FE406275758D6BF5F32790EF15D8416BF5548164859E785DB45E7787BB0E727ADE08641ED0000000866616B652D736967" # noqa self.assertEqual(K, transport._K) self.assertEqual(H, hexlify(transport._H).upper()) self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) self.assertTrue(transport._activated) + def test_kex_group16_sha512_client(self): + transport = FakeTransport() + transport.server_mode = False + kex = KexGroup16SHA512(transport) + kex.start_kex() + x = 
b"1E0000020100859FF55A23E0F66463561DD8BFC4764C69C05F85665B06EC9E29EF5003A53A8FA890B6A6EB624DEB55A4FB279DE7010A53580A126817E3D235B05A1081662B1500961D0625F0AAD287F1B597CBA9DB9550D9CC26355C4C59F92E613B5C21AC191F152C09A5DB46DCBA5EA58E3CA6A8B0EB7183E27FAC10106022E8521FA91240FB389060F1E1E4A355049D29DCC82921CE6588791743E4B1DEEE0166F7CC5180C3C75F3773342DF95C8C10AAA5D12975257027936B99B3DED6E6E98CF27EADEAEAE04E7F0A28071F578646B985FCE28A59CEB36287CB65759BE0544D4C4018CDF03C9078FE9CA79ECA611CB6966899E6FD29BE0781491C659FE2380E0D99D50D9CFAAB94E61BE311779719C4C43C6D223AD3799C3915A9E55076A21152DBBF911D6594296D6ECDC1B6FA71997CD29DF987B80FCA7F36BB7F19863C72BBBF839746AFBF9A5B407D468C976AA3E36FA118D3EAAD2E08BF6AE219F81F2CE2BE946337F06CC09BBFABE938A4087E413921CBEC1965ED905999B83396ECA226110CDF6EFB80F815F6489AF87561DA3857F13A7705921306D94176231FBB336B17C3724BC17A28BECB910093AB040873D5D760E8C182B88ECCE3E38DDA68CE35BD152DF7550BD908791FCCEDD1FFDF5ED2A57FFAE79599E487A7726D8A3D950B1729A08FBB60EE462A6BBE8BF0F5F0E1358129A37840FE5B3EEB8BF26E99FA222EAE83" # noqa + self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) + self.assertEqual( + (paramiko.kex_group1._MSG_KEXDH_REPLY,), transport._expect + ) + + # fake "reply" + msg = Message() + msg.add_string("fake-host-key") + msg.add_mpint(69) + msg.add_string("fake-sig") + msg.rewind() + kex.parse_next(paramiko.kex_group1._MSG_KEXDH_REPLY, msg) + K = 933242830095376162107925500057692534838883186615567574891154103836907630698358649443101764908667358576734565553213003142941996368306996312915844839972197961603283544950658467545799914435739152351344917376359963584614213874232577733869049670230112638724993540996854599166318001059065780674008011575015459772051180901213815080343343801745386220342919837913506966863570473712948197760657442974564354432738520446202131551650771882909329069340612274196233658123593466135642819578182367229641847749149740891990379052266213711500434128970973602206842980669193719602075489724202241641553472106310932258574377789863734311328542715212248147206865762697424822447603031087553480483833829498375309975229907460562402877655519980113688369262871485777790149373908739910846630414678346163764464587129010141922982925829457954376352735653834300282864445132624993186496129911208133529828461690634463092007726349795944930302881758403402084584307180896465875803621285362317770276493727205689466142632599776710824902573926951951209239626732358074877997756011804454926541386215567756538832824717436605031489511654178384081883801272314328403020205577714999460724519735573055540814037716770051316113795603990199374791348798218428912977728347485489266146775472 # noqa + H = b"F6E2BCC846B9B62591EFB86663D55D4769CA06B2EDABE469DF831639B2DDD5A271985011900A724CB2C87F19F347B3632A7C1536AF3D12EE463E6EA75281AF0C" # noqa + self.assertEqual(K, transport._K) + self.assertEqual(H, hexlify(transport._H).upper()) + self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) + self.assertTrue(transport._activated) + + def test_kex_group16_sha512_server(self): + transport = FakeTransport() + transport.server_mode = True + kex = KexGroup16SHA512(transport) + kex.start_kex() + self.assertEqual( + (paramiko.kex_group1._MSG_KEXDH_INIT,), transport._expect + ) + msg = Message() + msg.add_mpint(69) + msg.rewind() + kex.parse_next(paramiko.kex_group1._MSG_KEXDH_INIT, msg) + K = 
933242830095376162107925500057692534838883186615567574891154103836907630698358649443101764908667358576734565553213003142941996368306996312915844839972197961603283544950658467545799914435739152351344917376359963584614213874232577733869049670230112638724993540996854599166318001059065780674008011575015459772051180901213815080343343801745386220342919837913506966863570473712948197760657442974564354432738520446202131551650771882909329069340612274196233658123593466135642819578182367229641847749149740891990379052266213711500434128970973602206842980669193719602075489724202241641553472106310932258574377789863734311328542715212248147206865762697424822447603031087553480483833829498375309975229907460562402877655519980113688369262871485777790149373908739910846630414678346163764464587129010141922982925829457954376352735653834300282864445132624993186496129911208133529828461690634463092007726349795944930302881758403402084584307180896465875803621285362317770276493727205689466142632599776710824902573926951951209239626732358074877997756011804454926541386215567756538832824717436605031489511654178384081883801272314328403020205577714999460724519735573055540814037716770051316113795603990199374791348798218428912977728347485489266146775472 # noqa + H = b"F97BB05A572A663688CA7EA1AA812D3C82EE6C8FA9D4B1D69435783D931157F199909EA38B003E4E4385C8861183CBFF0CF0EF1433A8B3C69AB4DD9420FCC85F" # noqa + x = b"1F0000000866616B652D6B65790000020100859FF55A23E0F66463561DD8BFC4764C69C05F85665B06EC9E29EF5003A53A8FA890B6A6EB624DEB55A4FB279DE7010A53580A126817E3D235B05A1081662B1500961D0625F0AAD287F1B597CBA9DB9550D9CC26355C4C59F92E613B5C21AC191F152C09A5DB46DCBA5EA58E3CA6A8B0EB7183E27FAC10106022E8521FA91240FB389060F1E1E4A355049D29DCC82921CE6588791743E4B1DEEE0166F7CC5180C3C75F3773342DF95C8C10AAA5D12975257027936B99B3DED6E6E98CF27EADEAEAE04E7F0A28071F578646B985FCE28A59CEB36287CB65759BE0544D4C4018CDF03C9078FE9CA79ECA611CB6966899E6FD29BE0781491C659FE2380E0D99D50D9CFAAB94E61BE311779719C4C43C6D223AD3799C3915A9E55076A21152DBBF911D6594296D6ECDC1B6FA71997CD29DF987B80FCA7F36BB7F19863C72BBBF839746AFBF9A5B407D468C976AA3E36FA118D3EAAD2E08BF6AE219F81F2CE2BE946337F06CC09BBFABE938A4087E413921CBEC1965ED905999B83396ECA226110CDF6EFB80F815F6489AF87561DA3857F13A7705921306D94176231FBB336B17C3724BC17A28BECB910093AB040873D5D760E8C182B88ECCE3E38DDA68CE35BD152DF7550BD908791FCCEDD1FFDF5ED2A57FFAE79599E487A7726D8A3D950B1729A08FBB60EE462A6BBE8BF0F5F0E1358129A37840FE5B3EEB8BF26E99FA222EAE830000000866616B652D736967" # noqa + self.assertEqual(K, transport._K) + self.assertEqual(H, hexlify(transport._H).upper()) + self.assertEqual(x, hexlify(transport._message.asbytes()).upper()) + self.assertTrue(transport._activated) + + @pytest.mark.skipif("not KexCurve25519.is_available()") + def test_kex_c25519_client(self): + K = 71294722834835117201316639182051104803802881348227506835068888449366462300724 # noqa + transport = FakeTransport() + transport.server_mode = False + kex = KexCurve25519(transport) + kex.start_kex() + self.assertEqual( + (paramiko.kex_curve25519._MSG_KEXECDH_REPLY,), transport._expect + ) + + # fake reply + msg = Message() + msg.add_string("fake-host-key") + Q_S = unhexlify( + "8d13a119452382a1ada8eea4c979f3e63ad3f0c7366786d6c5b54b87219bae49" + ) + msg.add_string(Q_S) + msg.add_string("fake-sig") + msg.rewind() + kex.parse_next(paramiko.kex_curve25519._MSG_KEXECDH_REPLY, msg) + H = b"05B6F6437C0CF38D1A6C5A6F6E2558DEB54E7FC62447EBFB1E5D7407326A5475" + self.assertEqual(K, kex.transport._K) + self.assertEqual(H, hexlify(transport._H).upper()) + 
self.assertEqual((b"fake-host-key", b"fake-sig"), transport._verify) + self.assertTrue(transport._activated) + + @pytest.mark.skipif("not KexCurve25519.is_available()") + def test_kex_c25519_server(self): + K = 71294722834835117201316639182051104803802881348227506835068888449366462300724 # noqa + transport = FakeTransport() + transport.server_mode = True + kex = KexCurve25519(transport) + kex.start_kex() + self.assertEqual( + (paramiko.kex_curve25519._MSG_KEXECDH_INIT,), transport._expect + ) + + # fake init + msg = Message() + Q_C = unhexlify( + "8d13a119452382a1ada8eea4c979f3e63ad3f0c7366786d6c5b54b87219bae49" + ) + H = b"DF08FCFCF31560FEE639D9B6D56D760BC3455B5ADA148E4514181023E7A9B042" + msg.add_string(Q_C) + msg.rewind() + kex.parse_next(paramiko.kex_curve25519._MSG_KEXECDH_INIT, msg) + self.assertEqual(K, transport._K) + self.assertTrue(transport._activated) + self.assertEqual(H, hexlify(transport._H).upper()) diff --git a/tests/test_kex_gss.py b/tests/test_kex_gss.py index 3bf788daa..6f5625dc8 100644 --- a/tests/test_kex_gss.py +++ b/tests/test_kex_gss.py @@ -31,15 +31,16 @@ import paramiko +from .util import needs_gssapi, KerberosTestCase, update_env -class NullServer (paramiko.ServerInterface): +class NullServer(paramiko.ServerInterface): def get_allowed_auths(self, username): - return 'gssapi-keyex' + return "gssapi-keyex" - def check_auth_gssapi_keyex(self, username, - gss_authenticated=paramiko.AUTH_FAILED, - cc_file=None): + def check_auth_gssapi_keyex( + self, username, gss_authenticated=paramiko.AUTH_FAILED, cc_file=None + ): if gss_authenticated == paramiko.AUTH_SUCCESSFUL: return paramiko.AUTH_SUCCESSFUL return paramiko.AUTH_FAILED @@ -52,26 +53,22 @@ def check_channel_request(self, kind, chanid): return paramiko.OPEN_SUCCEEDED def check_channel_exec_request(self, channel, command): - if command != 'yes': + if command != b"yes": return False return True -class GSSKexTest(unittest.TestCase): - @staticmethod - def init(username, hostname): - global krb5_principal, targ_name - krb5_principal = username - targ_name = hostname - +@needs_gssapi +class GSSKexTest(KerberosTestCase): def setUp(self): - self.username = krb5_principal - self.hostname = socket.getfqdn(targ_name) + self.username = self.realm.user_princ + self.hostname = socket.getfqdn(self.realm.hostname) self.sockl = socket.socket() - self.sockl.bind((targ_name, 0)) + self.sockl.bind((self.realm.hostname, 0)) self.sockl.listen(1) self.addr, self.port = self.sockl.getsockname() self.event = threading.Event() + update_env(self, self.realm.env) thread = threading.Thread(target=self._run) thread.start() @@ -83,49 +80,75 @@ def tearDown(self): def _run(self): self.socks, addr = self.sockl.accept() self.ts = paramiko.Transport(self.socks, gss_kex=True) - host_key = paramiko.RSAKey.from_private_key_file('tests/test_rsa.key') + host_key = paramiko.RSAKey.from_private_key_file("tests/test_rsa.key") self.ts.add_server_key(host_key) - self.ts.set_gss_host(targ_name) + self.ts.set_gss_host(self.realm.hostname) try: self.ts.load_server_moduli() except: - print ('(Failed to load moduli -- gex will be unsupported.)') + print("(Failed to load moduli -- gex will be unsupported.)") server = NullServer() self.ts.start_server(self.event, server) - def test_1_gsskex_and_auth(self): + def _test_gsskex_and_auth(self, gss_host, rekey=False): """ Verify that Paramiko can handle SSHv2 GSS-API / SSPI authenticated Diffie-Hellman Key Exchange and user authentication with the GSS-API context created during key exchange. 
""" - host_key = paramiko.RSAKey.from_private_key_file('tests/test_rsa.key') + host_key = paramiko.RSAKey.from_private_key_file("tests/test_rsa.key") public_host_key = paramiko.RSAKey(data=host_key.asbytes()) self.tc = paramiko.SSHClient() - self.tc.get_host_keys().add('[%s]:%d' % (self.hostname, self.port), - 'ssh-rsa', public_host_key) - self.tc.connect(self.hostname, self.port, username=self.username, - gss_auth=True, gss_kex=True) + self.tc.get_host_keys().add( + "[%s]:%d" % (self.hostname, self.port), "ssh-rsa", public_host_key + ) + self.tc.connect( + self.hostname, + self.port, + username=self.username, + gss_auth=True, + gss_kex=True, + gss_host=gss_host, + ) self.event.wait(1.0) self.assert_(self.event.is_set()) self.assert_(self.ts.is_active()) self.assertEquals(self.username, self.ts.get_username()) self.assertEquals(True, self.ts.is_authenticated()) + self.assertEquals(True, self.tc.get_transport().gss_kex_used) - stdin, stdout, stderr = self.tc.exec_command('yes') + stdin, stdout, stderr = self.tc.exec_command("yes") schan = self.ts.accept(1.0) + if rekey: + self.tc.get_transport().renegotiate_keys() - schan.send('Hello there.\n') - schan.send_stderr('This is on stderr.\n') + schan.send("Hello there.\n") + schan.send_stderr("This is on stderr.\n") schan.close() - self.assertEquals('Hello there.\n', stdout.readline()) - self.assertEquals('', stdout.readline()) - self.assertEquals('This is on stderr.\n', stderr.readline()) - self.assertEquals('', stderr.readline()) + self.assertEquals("Hello there.\n", stdout.readline()) + self.assertEquals("", stdout.readline()) + self.assertEquals("This is on stderr.\n", stderr.readline()) + self.assertEquals("", stderr.readline()) stdin.close() stdout.close() stderr.close() + + def test_gsskex_and_auth(self): + """ + Verify that Paramiko can handle SSHv2 GSS-API / SSPI authenticated + Diffie-Hellman Key Exchange and user authentication with the GSS-API + context created during key exchange. + """ + self._test_gsskex_and_auth(gss_host=None) + + # To be investigated, see https://github.com/paramiko/paramiko/issues/1312 + @unittest.expectedFailure + def test_gsskex_and_auth_rekey(self): + """ + Verify that Paramiko can rekey. 
+ """ + self._test_gsskex_and_auth(gss_host=None, rekey=True) diff --git a/tests/test_message.py b/tests/test_message.py index f18cae90b..57766d904 100644 --- a/tests/test_message.py +++ b/tests/test_message.py @@ -21,24 +21,28 @@ """ import unittest + from paramiko.message import Message from paramiko.common import byte_chr, zero_byte -class MessageTest (unittest.TestCase): +class MessageTest(unittest.TestCase): - __a = b'\x00\x00\x00\x17\x07\x60\xe0\x90\x00\x00\x00\x01\x71\x00\x00\x00\x05\x68\x65\x6c\x6c\x6f\x00\x00\x03\xe8' + b'x' * 1000 - __b = b'\x01\x00\xf3\x00\x3f\x00\x00\x00\x10\x68\x75\x65\x79\x2c\x64\x65\x77\x65\x79\x2c\x6c\x6f\x75\x69\x65' - __c = b'\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\xf5\xe4\xd3\xc2\xb1\x09\x00\x00\x00\x01\x11\x00\x00\x00\x07\x00\xf5\xe4\xd3\xc2\xb1\x09\x00\x00\x00\x06\x9a\x1b\x2c\x3d\x4e\xf7' - __d = b'\x00\x00\x00\x05\xff\x00\x00\x00\x05\x11\x22\x33\x44\x55\xff\x00\x00\x00\x0a\x00\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x03\x63\x61\x74\x00\x00\x00\x03\x61\x2c\x62' + __a = ( + b"\x00\x00\x00\x17\x07\x60\xe0\x90\x00\x00\x00\x01\x71\x00\x00\x00\x05\x68\x65\x6c\x6c\x6f\x00\x00\x03\xe8" # noqa + + b"x" * 1000 + ) + __b = b"\x01\x00\xf3\x00\x3f\x00\x00\x00\x10\x68\x75\x65\x79\x2c\x64\x65\x77\x65\x79\x2c\x6c\x6f\x75\x69\x65" # noqa + __c = b"\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\xf5\xe4\xd3\xc2\xb1\x09\x00\x00\x00\x01\x11\x00\x00\x00\x07\x00\xf5\xe4\xd3\xc2\xb1\x09\x00\x00\x00\x06\x9a\x1b\x2c\x3d\x4e\xf7" # noqa + __d = b"\x00\x00\x00\x05\xff\x00\x00\x00\x05\x11\x22\x33\x44\x55\xff\x00\x00\x00\x0a\x00\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x03\x63\x61\x74\x00\x00\x00\x03\x61\x2c\x62" # noqa - def test_1_encode(self): + def test_encode(self): msg = Message() msg.add_int(23) msg.add_int(123789456) - msg.add_string('q') - msg.add_string('hello') - msg.add_string('x' * 1000) + msg.add_string("q") + msg.add_string("hello") + msg.add_string("x" * 1000) self.assertEqual(msg.asbytes(), self.__a) msg = Message() @@ -47,7 +51,7 @@ def test_1_encode(self): msg.add_byte(byte_chr(0xf3)) msg.add_bytes(zero_byte + byte_chr(0x3f)) - msg.add_list(['huey', 'dewey', 'louie']) + msg.add_list(["huey", "dewey", "louie"]) self.assertEqual(msg.asbytes(), self.__b) msg = Message() @@ -58,20 +62,20 @@ def test_1_encode(self): msg.add_mpint(-0x65e4d3c2b109) self.assertEqual(msg.asbytes(), self.__c) - def test_2_decode(self): + def test_decode(self): msg = Message(self.__a) self.assertEqual(msg.get_int(), 23) self.assertEqual(msg.get_int(), 123789456) - self.assertEqual(msg.get_text(), 'q') - self.assertEqual(msg.get_text(), 'hello') - self.assertEqual(msg.get_text(), 'x' * 1000) + self.assertEqual(msg.get_text(), "q") + self.assertEqual(msg.get_text(), "hello") + self.assertEqual(msg.get_text(), "x" * 1000) msg = Message(self.__b) self.assertEqual(msg.get_boolean(), True) self.assertEqual(msg.get_boolean(), False) self.assertEqual(msg.get_byte(), byte_chr(0xf3)) self.assertEqual(msg.get_bytes(2), zero_byte + byte_chr(0x3f)) - self.assertEqual(msg.get_list(), ['huey', 'dewey', 'louie']) + self.assertEqual(msg.get_list(), ["huey", "dewey", "louie"]) msg = Message(self.__c) self.assertEqual(msg.get_int64(), 5) @@ -80,17 +84,17 @@ def test_2_decode(self): self.assertEqual(msg.get_mpint(), 0xf5e4d3c2b109) self.assertEqual(msg.get_mpint(), -0x65e4d3c2b109) - def test_3_add(self): + def test_add(self): msg = Message() msg.add(5) msg.add(0x1122334455) msg.add(0xf00000000000000000) msg.add(True) - msg.add('cat') - msg.add(['a', 'b']) + msg.add("cat") + msg.add(["a", 
"b"]) self.assertEqual(msg.asbytes(), self.__d) - def test_4_misc(self): + def test_misc(self): msg = Message(self.__d) self.assertEqual(msg.get_adaptive_int(), 5) self.assertEqual(msg.get_adaptive_int(), 0x1122334455) diff --git a/tests/test_packetizer.py b/tests/test_packetizer.py index ccfe26bdf..de80770ef 100644 --- a/tests/test_packetizer.py +++ b/tests/test_packetizer.py @@ -20,34 +20,35 @@ Some unit tests for the ssh2 protocol in Transport. """ +import sys import unittest from hashlib import sha1 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers import algorithms, Cipher, modes -from tests.loop import LoopSocket - from paramiko import Message, Packetizer, util from paramiko.common import byte_chr, zero_byte +from .loop import LoopSocket + + x55 = byte_chr(0x55) x1f = byte_chr(0x1f) -class PacketizerTest (unittest.TestCase): - - def test_1_write(self): +class PacketizerTest(unittest.TestCase): + def test_write(self): rsock = LoopSocket() wsock = LoopSocket() rsock.link(wsock) p = Packetizer(wsock) - p.set_log(util.get_logger('paramiko.transport')) + p.set_log(util.get_logger("paramiko.transport")) p.set_hexdump(True) encryptor = Cipher( algorithms.AES(zero_byte * 16), modes.CBC(x55 * 16), - backend=default_backend() + backend=default_backend(), ).encryptor() p.set_outbound_cipher(encryptor, 16, sha1, 12, x1f * 20) @@ -62,39 +63,46 @@ def test_1_write(self): data = rsock.recv(100) # 32 + 12 bytes of MAC = 44 self.assertEqual(44, len(data)) - self.assertEqual(b'\x43\x91\x97\xbd\x5b\x50\xac\x25\x87\xc2\xc4\x6b\xc7\xe9\x38\xc0', data[:16]) + self.assertEqual( + b"\x43\x91\x97\xbd\x5b\x50\xac\x25\x87\xc2\xc4\x6b\xc7\xe9\x38\xc0", # noqa + data[:16], + ) - def test_2_read(self): + def test_read(self): rsock = LoopSocket() wsock = LoopSocket() rsock.link(wsock) p = Packetizer(rsock) - p.set_log(util.get_logger('paramiko.transport')) + p.set_log(util.get_logger("paramiko.transport")) p.set_hexdump(True) decryptor = Cipher( algorithms.AES(zero_byte * 16), modes.CBC(x55 * 16), - backend=default_backend() + backend=default_backend(), ).decryptor() p.set_inbound_cipher(decryptor, 16, sha1, 12, x1f * 20) - wsock.send(b'\x43\x91\x97\xbd\x5b\x50\xac\x25\x87\xc2\xc4\x6b\xc7\xe9\x38\xc0\x90\xd2\x16\x56\x0d\x71\x73\x61\x38\x7c\x4c\x3d\xfb\x97\x7d\xe2\x6e\x03\xb1\xa0\xc2\x1c\xd6\x41\x41\x4c\xb4\x59') + wsock.send( + b"\x43\x91\x97\xbd\x5b\x50\xac\x25\x87\xc2\xc4\x6b\xc7\xe9\x38\xc0\x90\xd2\x16\x56\x0d\x71\x73\x61\x38\x7c\x4c\x3d\xfb\x97\x7d\xe2\x6e\x03\xb1\xa0\xc2\x1c\xd6\x41\x41\x4c\xb4\x59" # noqa + ) cmd, m = p.read_message() self.assertEqual(100, cmd) self.assertEqual(100, m.get_int()) self.assertEqual(1, m.get_int()) self.assertEqual(900, m.get_int()) - def test_3_closed(self): + def test_closed(self): + if sys.platform.startswith("win"): # no SIGALRM on windows + return rsock = LoopSocket() wsock = LoopSocket() rsock.link(wsock) p = Packetizer(wsock) - p.set_log(util.get_logger('paramiko.transport')) + p.set_log(util.get_logger("paramiko.transport")) p.set_hexdump(True) encryptor = Cipher( algorithms.AES(zero_byte * 16), modes.CBC(x55 * 16), - backend=default_backend() + backend=default_backend(), ).encryptor() p.set_outbound_cipher(encryptor, 16, sha1, 12, x1f * 20) @@ -112,9 +120,13 @@ def test_3_closed(self): import signal class TimeoutError(Exception): - pass + def __init__(self, error_message): + if hasattr(errno, "ETIME"): + self.message = os.sterror(errno.ETIME) + else: + self.messaage = error_message - def timeout(seconds=1, 
error_message=os.strerror(errno.ETIME)): + def timeout(seconds=1, error_message="Timer expired"): def decorator(func): def _handle_timeout(signum, frame): raise TimeoutError(error_message) @@ -131,5 +143,6 @@ def wrapper(*args, **kwargs): return wraps(func)(wrapper) return decorator + send = timeout()(p.send_message) self.assertRaises(EOFError, send, m) diff --git a/tests/test_pkey.py b/tests/test_pkey.py index 2181dd914..d44a96ac4 100644 --- a/tests/test_pkey.py +++ b/tests/test_pkey.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright (C) 2003-2009 Robey Pointer # # This file is part of paramiko. @@ -22,28 +23,53 @@ import unittest import os +import stat from binascii import hexlify from hashlib import md5 -import base64 -from paramiko import RSAKey, DSSKey, ECDSAKey, Message, util -from paramiko.py3compat import StringIO, byte_chr, b, bytes +from paramiko import ( + RSAKey, + DSSKey, + ECDSAKey, + Ed25519Key, + Message, + util, + SSHException, +) +from paramiko.py3compat import StringIO, byte_chr, b, bytes, PY2 +from paramiko.common import o600 + +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateNumbers +from mock import patch, Mock +import pytest + +from .util import _support, is_low_entropy -from tests.util import test_path # from openssh's ssh-keygen -PUB_RSA = 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA049W6geFpmsljTwfvI1UmKWWJPNFI74+vNKTk4dmzkQY2yAMs6FhlvhlI8ysU4oj71ZsRYMecHbBbxdN79+JRFVYTKaLqjwGENeTd+yv4q+V2PvZv3fLnzApI3l7EJCqhWwJUHJ1jAkZzqDx0tyOL4uoZpww3nmE0kb3y21tH4c=' -PUB_DSS = 'ssh-dss AAAAB3NzaC1kc3MAAACBAOeBpgNnfRzr/twmAQRu2XwWAp3CFtrVnug6s6fgwj/oLjYbVtjAy6pl/h0EKCWx2rf1IetyNsTxWrniA9I6HeDj65X1FyDkg6g8tvCnaNB8Xp/UUhuzHuGsMIipRxBxw9LF608EqZcj1E3ytktoW5B5OcjrkEoz3xG7C+rpIjYvAAAAFQDwz4UnmsGiSNu5iqjn3uTzwUpshwAAAIEAkxfFeY8P2wZpDjX0MimZl5wkoFQDL25cPzGBuB4OnB8NoUk/yjAHIIpEShw8V+LzouMK5CTJQo5+Ngw3qIch/WgRmMHy4kBq1SsXMjQCte1So6HBMvBPIW5SiMTmjCfZZiw4AYHK+B/JaOwaG9yRg2Ejg4Ok10+XFDxlqZo8Y+wAAACARmR7CCPjodxASvRbIyzaVpZoJ/Z6x7dAumV+ysrV1BVYd0lYukmnjO1kKBWApqpH1ve9XDQYN8zgxM4b16L21kpoWQnZtXrY3GZ4/it9kUgyB7+NwacIBlXa8cMDL7Q/69o0d54U0X/NeX5QxuYR6OMJlrkQB7oiW/P/1mwjQgE=' -PUB_ECDSA_256 = 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJSPZm3ZWkvk/Zx8WP+fZRZ5/NBBHnGQwR6uIC6XHGPDIHuWUzIjAwA0bzqkOUffEsbLe+uQgKl5kbc/L8KA/eo=' -PUB_ECDSA_384 = 'ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBbGibQLW9AAZiGN2hEQxWYYoFaWKwN3PKSaDJSMqmIn1Z9sgRUuw8Y/w502OGvXL/wFk0i2z50l3pWZjD7gfMH7gX5TUiCzwrQkS+Hn1U2S9aF5WJp0NcIzYxXw2r4M2A==' -PUB_ECDSA_521 = 'ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBACaOaFLZGuxa5AW16qj6VLypFbLrEWrt9AZUloCMefxO8bNLjK/O5g0rAVasar1TnyHE9qj4NwzANZASWjQNbc4MAG8vzqezFwLIn/kNyNTsXNfqEko9OgHZknlj2Z79dwTJcRAL4QLcT5aND0EHZLB2fAUDXiWIb2j4rg1mwPlBMiBXA==' - -FINGER_RSA = '1024 60:73:38:44:cb:51:86:65:7f:de:da:a2:2b:5a:57:d5' -FINGER_DSS = '1024 44:78:f0:b9:a2:3c:c5:18:20:09:ff:75:5b:c1:d2:6c' -FINGER_ECDSA_256 = '256 25:19:eb:55:e6:a1:47:ff:4f:38:d2:75:6f:a5:d5:60' -FINGER_ECDSA_384 = '384 c1:8d:a0:59:09:47:41:8e:a8:a6:07:01:29:23:b4:65' -FINGER_ECDSA_521 = '521 44:58:22:52:12:33:16:0e:ce:0e:be:2c:7c:7e:cc:1e' -SIGNED_RSA = 
'20:d7:8a:31:21:cb:f7:92:12:f2:a4:89:37:f5:78:af:e6:16:b6:25:b9:97:3d:a2:cd:5f:ca:20:21:73:4c:ad:34:73:8f:20:77:28:e2:94:15:08:d8:91:40:7a:85:83:bf:18:37:95:dc:54:1a:9b:88:29:6c:73:ca:38:b4:04:f1:56:b9:f2:42:9d:52:1b:29:29:b4:4f:fd:c9:2d:af:47:d2:40:76:30:f3:63:45:0c:d9:1d:43:86:0f:1c:70:e2:93:12:34:f3:ac:c5:0a:2f:14:50:66:59:f1:88:ee:c1:4a:e9:d1:9c:4e:46:f0:0e:47:6f:38:74:f1:44:a8' +PUB_RSA = "ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA049W6geFpmsljTwfvI1UmKWWJPNFI74+vNKTk4dmzkQY2yAMs6FhlvhlI8ysU4oj71ZsRYMecHbBbxdN79+JRFVYTKaLqjwGENeTd+yv4q+V2PvZv3fLnzApI3l7EJCqhWwJUHJ1jAkZzqDx0tyOL4uoZpww3nmE0kb3y21tH4c=" # noqa +PUB_DSS = "ssh-dss AAAAB3NzaC1kc3MAAACBAOeBpgNnfRzr/twmAQRu2XwWAp3CFtrVnug6s6fgwj/oLjYbVtjAy6pl/h0EKCWx2rf1IetyNsTxWrniA9I6HeDj65X1FyDkg6g8tvCnaNB8Xp/UUhuzHuGsMIipRxBxw9LF608EqZcj1E3ytktoW5B5OcjrkEoz3xG7C+rpIjYvAAAAFQDwz4UnmsGiSNu5iqjn3uTzwUpshwAAAIEAkxfFeY8P2wZpDjX0MimZl5wkoFQDL25cPzGBuB4OnB8NoUk/yjAHIIpEShw8V+LzouMK5CTJQo5+Ngw3qIch/WgRmMHy4kBq1SsXMjQCte1So6HBMvBPIW5SiMTmjCfZZiw4AYHK+B/JaOwaG9yRg2Ejg4Ok10+XFDxlqZo8Y+wAAACARmR7CCPjodxASvRbIyzaVpZoJ/Z6x7dAumV+ysrV1BVYd0lYukmnjO1kKBWApqpH1ve9XDQYN8zgxM4b16L21kpoWQnZtXrY3GZ4/it9kUgyB7+NwacIBlXa8cMDL7Q/69o0d54U0X/NeX5QxuYR6OMJlrkQB7oiW/P/1mwjQgE=" # noqa +PUB_ECDSA_256 = "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJSPZm3ZWkvk/Zx8WP+fZRZ5/NBBHnGQwR6uIC6XHGPDIHuWUzIjAwA0bzqkOUffEsbLe+uQgKl5kbc/L8KA/eo=" # noqa +PUB_ECDSA_384 = "ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBbGibQLW9AAZiGN2hEQxWYYoFaWKwN3PKSaDJSMqmIn1Z9sgRUuw8Y/w502OGvXL/wFk0i2z50l3pWZjD7gfMH7gX5TUiCzwrQkS+Hn1U2S9aF5WJp0NcIzYxXw2r4M2A==" # noqa +PUB_ECDSA_521 = "ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBACaOaFLZGuxa5AW16qj6VLypFbLrEWrt9AZUloCMefxO8bNLjK/O5g0rAVasar1TnyHE9qj4NwzANZASWjQNbc4MAG8vzqezFwLIn/kNyNTsXNfqEko9OgHZknlj2Z79dwTJcRAL4QLcT5aND0EHZLB2fAUDXiWIb2j4rg1mwPlBMiBXA==" # noqa +PUB_RSA_2K_OPENSSH = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDF+Dpr54DX0WdeTDpNAMdkCWEkl3OXtNgf58qlN1gX572OLBqLf0zT4bHstUEpU3piazph/rSWcUMuBoD46tZ6jiH7H9b9Pem2eYQWaELDDkM+v9BMbEy5rMbFRLol5OtEvPFqneyEAanPOgvd8t3yyhSev9QVusakzJ8j8LGgrA8huYZ+Srnw0shEWLG70KUKCh3rG0QIvA8nfhtUOisr2Gp+F0YxMGb5gwBlQYAYE5l6u1SjZ7hNjyNosjK+wRBFgFFBYVpkZKJgWoK9w4ijFyzMZTucnZMqKOKAjIJvHfKBf2/cEfYxSq1EndqTqjYsd9T7/s2vcn1OH5a0wkER" # noqa +RSA_2K_OPENSSH_P = 161773687847617758886803946572654778625119997081005961935077336594287351354258259920334554906235187683459069634729972458348855793639393524799865799559575414247668746919721196359908321800753913350455861871582087986355637886875933045224711827701526739934602161222599672381604211130651397331775901258858869418853 # noqa +RSA_2K_OPENSSH_Q = 154483416325630619558401349033571772244816915504195060221073502923720741119664820208064202825686848103224453777955988437823797692957091438442833606009978046057345917301441832647551208158342812551003395417862260727795454409459089912659057393394458150862012620127030757893820711157099494238156383382454310199869 # noqa +PUB_DSS_1K_OPENSSH = "ssh-dss 
AAAAB3NzaC1kc3MAAACBAL8XEx7F9xuwBNles+vWpNF+YcofrBhjX1r5QhpBe0eoYWLHRcroN6lxwCdGYRfgOoRjTncBiixQX/uUxAY96zDh3ir492s2BcJt4ihvNn/AY0I0OTuX/2IwGk9CGzafjaeZNVYxMa8lcVt0hSOTjkPQ7gVuk6bJzMInvie+VWKLAAAAFQDUgYdY+rhR0SkKbC09BS/SIHcB+wAAAIB44+4zpCNcd0CGvZlowH99zyPX8uxQtmTLQFuR2O8O0FgVVuCdDgD0D9W8CLOp32oatpM0jyyN89EdvSWzjHzZJ+L6H1FtZps7uhpDFWHdva1R25vyGecLMUuXjo5t/D7oCDih+HwHoSAxoi0QvsPd8/qqHQVznNJKtR6thUpXEwAAAIAG4DCBjbgTTgpBw0egRkJwBSz0oTt+1IcapNU2jA6N8urMSk9YXHEQHKN68BAF3YJ59q2Ujv3LOXmBqGd1T+kzwUszfMlgzq8MMu19Yfzse6AIK1Agn1Vj6F7YXLsXDN+T4KszX5+FJa7t/Zsp3nALWy6l0f4WKivEF5Y2QpEFcQ==" # noqa +PUB_EC_384_OPENSSH = "ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIch5LXTq/L/TWsTGG6dIktxD8DIMh7EfvoRmWsks6CuNDTvFvbQNtY4QO1mn5OXegHbS0M5DPIS++wpKGFP3suDEH08O35vZQasLNrL0tO2jyyEnzB2ZEx3PPYci811yg==" # noqa + +FINGER_RSA = "1024 60:73:38:44:cb:51:86:65:7f:de:da:a2:2b:5a:57:d5" +FINGER_DSS = "1024 44:78:f0:b9:a2:3c:c5:18:20:09:ff:75:5b:c1:d2:6c" +FINGER_ECDSA_256 = "256 25:19:eb:55:e6:a1:47:ff:4f:38:d2:75:6f:a5:d5:60" +FINGER_ECDSA_384 = "384 c1:8d:a0:59:09:47:41:8e:a8:a6:07:01:29:23:b4:65" +FINGER_ECDSA_521 = "521 44:58:22:52:12:33:16:0e:ce:0e:be:2c:7c:7e:cc:1e" +SIGNED_RSA = "20:d7:8a:31:21:cb:f7:92:12:f2:a4:89:37:f5:78:af:e6:16:b6:25:b9:97:3d:a2:cd:5f:ca:20:21:73:4c:ad:34:73:8f:20:77:28:e2:94:15:08:d8:91:40:7a:85:83:bf:18:37:95:dc:54:1a:9b:88:29:6c:73:ca:38:b4:04:f1:56:b9:f2:42:9d:52:1b:29:29:b4:4f:fd:c9:2d:af:47:d2:40:76:30:f3:63:45:0c:d9:1d:43:86:0f:1c:70:e2:93:12:34:f3:ac:c5:0a:2f:14:50:66:59:f1:88:ee:c1:4a:e9:d1:9c:4e:46:f0:0e:47:6f:38:74:f1:44:a8" # noqa +SIGNED_RSA_256 = "cc:6:60:e0:0:2c:ac:9e:26:bc:d5:68:64:3f:9f:a7:e5:aa:41:eb:88:4a:25:5:9c:93:84:66:ef:ef:60:f4:34:fb:f4:c8:3d:55:33:6a:77:bd:b2:ee:83:f:71:27:41:7e:f5:7:5:0:a9:4c:7:80:6f:be:76:67:cb:58:35:b9:2b:f3:c2:d3:3c:ee:e1:3f:59:e0:fa:e4:5c:92:ed:ae:74:de:d:d6:27:16:8f:84:a3:86:68:c:94:90:7d:6e:cc:81:12:d8:b6:ad:aa:31:a8:13:3d:63:81:3e:bb:5:b6:38:4d:2:d:1b:5b:70:de:83:cc:3a:cb:31" # noqa +SIGNED_RSA_512 = "87:46:8b:75:92:33:78:a0:22:35:32:39:23:c6:ab:e1:6:92:ad:bc:7f:6e:ab:19:32:e4:78:b2:2c:8f:1d:c:65:da:fc:a5:7:ca:b6:55:55:31:83:b1:a0:af:d1:95:c5:2e:af:56:ba:f5:41:64:f:39:9d:af:82:43:22:8f:90:52:9d:89:e7:45:97:df:f3:f2:bc:7b:3a:db:89:e:34:fd:18:62:25:1b:ef:77:aa:c6:6c:99:36:3a:84:d6:9c:2a:34:8c:7f:f4:bb:c9:a5:9a:6c:11:f2:cf:da:51:5e:1e:7f:90:27:34:de:b2:f3:15:4f:db:47:32:6b:a7" # noqa +FINGER_RSA_2K_OPENSSH = "2048 68:d1:72:01:bf:c0:0c:66:97:78:df:ce:75:74:46:d6" +FINGER_DSS_1K_OPENSSH = "1024 cf:1d:eb:d7:61:d3:12:94:c6:c0:c6:54:35:35:b0:82" +FINGER_EC_384_OPENSSH = "384 72:14:df:c1:9a:c3:e6:0e:11:29:d6:32:18:7b:ea:9b" RSA_PRIVATE_OUT = """\ -----BEGIN RSA PRIVATE KEY----- @@ -105,26 +131,39 @@ -----END EC PRIVATE KEY----- """ -x1234 = b'\x01\x02\x03\x04' +x1234 = b"\x01\x02\x03\x04" +TEST_KEY_BYTESTR_2 = "\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x01#\x00\x00\x00\x81\x00\xd3\x8fV\xea\x07\x85\xa6k%\x8d<\x1f\xbc\x8dT\x98\xa5\x96$\xf3E#\xbe>\xbc\xd2\x93\x93\x87f\xceD\x18\xdb \x0c\xb3\xa1a\x96\xf8e#\xcc\xacS\x8a#\xefVlE\x83\x1epv\xc1o\x17M\xef\xdf\x89DUXL\xa6\x8b\xaa<\x06\x10\xd7\x93w\xec\xaf\xe2\xaf\x95\xd8\xfb\xd9\xbfw\xcb\x9f0)#y{\x10\x90\xaa\x85l\tPru\x8c\t\x19\xce\xa0\xf1\xd2\xdc\x8e/\x8b\xa8f\x9c0\xdey\x84\xd2F\xf7\xcbmm\x1f\x87" # noqa +TEST_KEY_BYTESTR_3 = "\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x01#\x00\x00\x00\x00ӏV\x07k%<\x1fT$E#>ғfD\x18 \x0cae#̬S#VlE\x1epvo\x17M߉DUXL<\x06\x10דw\u2bd5ٿw˟0)#y{\x10l\tPru\t\x19Π\u070e/f0yFmm\x1f" # noqa -class KeyTest (unittest.TestCase): +class KeyTest(unittest.TestCase): def setUp(self): 
pass def tearDown(self): pass - def test_1_generate_key_bytes(self): - key = util.generate_key_bytes(md5, x1234, 'happy birthday', 30) - exp = b'\x61\xE1\xF2\x72\xF4\xC1\xC4\x56\x15\x86\xBD\x32\x24\x98\xC0\xE9\x24\x67\x27\x80\xF4\x7B\xB3\x7D\xDA\x7D\x54\x01\x9E\x64' + def assert_keyfile_is_encrypted(self, keyfile): + """ + A quick check that filename looks like an encrypted key. + """ + with open(keyfile, "r") as fh: + self.assertEqual( + fh.readline()[:-1], "-----BEGIN RSA PRIVATE KEY-----" + ) + self.assertEqual(fh.readline()[:-1], "Proc-Type: 4,ENCRYPTED") + self.assertEqual(fh.readline()[0:10], "DEK-Info: ") + + def test_generate_key_bytes(self): + key = util.generate_key_bytes(md5, x1234, "happy birthday", 30) + exp = b"\x61\xE1\xF2\x72\xF4\xC1\xC4\x56\x15\x86\xBD\x32\x24\x98\xC0\xE9\x24\x67\x27\x80\xF4\x7B\xB3\x7D\xDA\x7D\x54\x01\x9E\x64" # noqa self.assertEqual(exp, key) - def test_2_load_rsa(self): - key = RSAKey.from_private_key_file(test_path('test_rsa.key')) - self.assertEqual('ssh-rsa', key.get_name()) - exp_rsa = b(FINGER_RSA.split()[1].replace(':', '')) + def test_load_rsa(self): + key = RSAKey.from_private_key_file(_support("test_rsa.key")) + self.assertEqual("ssh-rsa", key.get_name()) + exp_rsa = b(FINGER_RSA.split()[1].replace(":", "")) my_rsa = hexlify(key.get_fingerprint()) self.assertEqual(exp_rsa, my_rsa) self.assertEqual(PUB_RSA.split()[1], key.get_base64()) @@ -137,19 +176,31 @@ def test_2_load_rsa(self): key2 = RSAKey.from_private_key(s) self.assertEqual(key, key2) - def test_3_load_rsa_password(self): - key = RSAKey.from_private_key_file(test_path('test_rsa_password.key'), 'television') - self.assertEqual('ssh-rsa', key.get_name()) - exp_rsa = b(FINGER_RSA.split()[1].replace(':', '')) + def test_load_rsa_transmutes_crypto_exceptions(self): + # TODO: nix unittest for pytest + for exception in (TypeError("onoz"), UnsupportedAlgorithm("oops")): + with patch( + "paramiko.rsakey.serialization.load_der_private_key" + ) as loader: + loader.side_effect = exception + with pytest.raises(SSHException, match=str(exception)): + RSAKey.from_private_key_file(_support("test_rsa.key")) + + def test_load_rsa_password(self): + key = RSAKey.from_private_key_file( + _support("test_rsa_password.key"), "television" + ) + self.assertEqual("ssh-rsa", key.get_name()) + exp_rsa = b(FINGER_RSA.split()[1].replace(":", "")) my_rsa = hexlify(key.get_fingerprint()) self.assertEqual(exp_rsa, my_rsa) self.assertEqual(PUB_RSA.split()[1], key.get_base64()) self.assertEqual(1024, key.get_bits()) - def test_4_load_dss(self): - key = DSSKey.from_private_key_file(test_path('test_dss.key')) - self.assertEqual('ssh-dss', key.get_name()) - exp_dss = b(FINGER_DSS.split()[1].replace(':', '')) + def test_load_dss(self): + key = DSSKey.from_private_key_file(_support("test_dss.key")) + self.assertEqual("ssh-dss", key.get_name()) + exp_dss = b(FINGER_DSS.split()[1].replace(":", "")) my_dss = hexlify(key.get_fingerprint()) self.assertEqual(exp_dss, my_dss) self.assertEqual(PUB_DSS.split()[1], key.get_base64()) @@ -162,106 +213,118 @@ def test_4_load_dss(self): key2 = DSSKey.from_private_key(s) self.assertEqual(key, key2) - def test_5_load_dss_password(self): - key = DSSKey.from_private_key_file(test_path('test_dss_password.key'), 'television') - self.assertEqual('ssh-dss', key.get_name()) - exp_dss = b(FINGER_DSS.split()[1].replace(':', '')) + def test_load_dss_password(self): + key = DSSKey.from_private_key_file( + _support("test_dss_password.key"), "television" + ) + self.assertEqual("ssh-dss", key.get_name()) + 
exp_dss = b(FINGER_DSS.split()[1].replace(":", "")) my_dss = hexlify(key.get_fingerprint()) self.assertEqual(exp_dss, my_dss) self.assertEqual(PUB_DSS.split()[1], key.get_base64()) self.assertEqual(1024, key.get_bits()) - def test_6_compare_rsa(self): + def test_compare_rsa(self): # verify that the private & public keys compare equal - key = RSAKey.from_private_key_file(test_path('test_rsa.key')) + key = RSAKey.from_private_key_file(_support("test_rsa.key")) self.assertEqual(key, key) pub = RSAKey(data=key.asbytes()) self.assertTrue(key.can_sign()) self.assertTrue(not pub.can_sign()) self.assertEqual(key, pub) - def test_7_compare_dss(self): + def test_compare_dss(self): # verify that the private & public keys compare equal - key = DSSKey.from_private_key_file(test_path('test_dss.key')) + key = DSSKey.from_private_key_file(_support("test_dss.key")) self.assertEqual(key, key) pub = DSSKey(data=key.asbytes()) self.assertTrue(key.can_sign()) self.assertTrue(not pub.can_sign()) self.assertEqual(key, pub) - def test_8_sign_rsa(self): - # verify that the rsa private key can sign and verify - key = RSAKey.from_private_key_file(test_path('test_rsa.key')) - msg = key.sign_ssh_data(b'ice weasels') - self.assertTrue(type(msg) is Message) + def _sign_and_verify_rsa(self, algorithm, saved_sig): + key = RSAKey.from_private_key_file(_support("test_rsa.key")) + msg = key.sign_ssh_data(b"ice weasels", algorithm) + assert isinstance(msg, Message) msg.rewind() - self.assertEqual('ssh-rsa', msg.get_text()) - sig = bytes().join([byte_chr(int(x, 16)) for x in SIGNED_RSA.split(':')]) - self.assertEqual(sig, msg.get_binary()) + assert msg.get_text() == algorithm + expected = bytes().join( + [byte_chr(int(x, 16)) for x in saved_sig.split(":")] + ) + assert msg.get_binary() == expected msg.rewind() pub = RSAKey(data=key.asbytes()) - self.assertTrue(pub.verify_ssh_sig(b'ice weasels', msg)) + self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg)) + + def test_sign_and_verify_ssh_rsa(self): + self._sign_and_verify_rsa("ssh-rsa", SIGNED_RSA) - def test_9_sign_dss(self): + def test_sign_and_verify_rsa_sha2_512(self): + self._sign_and_verify_rsa("rsa-sha2-512", SIGNED_RSA_512) + + def test_sign_and_verify_rsa_sha2_256(self): + self._sign_and_verify_rsa("rsa-sha2-256", SIGNED_RSA_256) + + def test_sign_dss(self): # verify that the dss private key can sign and verify - key = DSSKey.from_private_key_file(test_path('test_dss.key')) - msg = key.sign_ssh_data(b'ice weasels') + key = DSSKey.from_private_key_file(_support("test_dss.key")) + msg = key.sign_ssh_data(b"ice weasels") self.assertTrue(type(msg) is Message) msg.rewind() - self.assertEqual('ssh-dss', msg.get_text()) + self.assertEqual("ssh-dss", msg.get_text()) # can't do the same test as we do for RSA, because DSS signatures # are usually different each time. but we can test verification # anyway so it's ok. 
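        # (Editor's note: DSA signing mixes a fresh random nonce into every
        # signature, so the raw (r, s) blob differs on each run even for the
        # same input; only the fixed 2 x 20-byte ssh-dss blob length asserted
        # below and a successful verify are stable enough to check.)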
self.assertEqual(40, len(msg.get_binary())) msg.rewind() pub = DSSKey(data=key.asbytes()) - self.assertTrue(pub.verify_ssh_sig(b'ice weasels', msg)) + self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg)) - def test_A_generate_rsa(self): + def test_generate_rsa(self): key = RSAKey.generate(1024) - msg = key.sign_ssh_data(b'jerri blank') + msg = key.sign_ssh_data(b"jerri blank") msg.rewind() - self.assertTrue(key.verify_ssh_sig(b'jerri blank', msg)) + self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg)) - def test_B_generate_dss(self): + def test_generate_dss(self): key = DSSKey.generate(1024) - msg = key.sign_ssh_data(b'jerri blank') + msg = key.sign_ssh_data(b"jerri blank") msg.rewind() - self.assertTrue(key.verify_ssh_sig(b'jerri blank', msg)) + self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg)) - def test_C_generate_ecdsa(self): + def test_generate_ecdsa(self): key = ECDSAKey.generate() - msg = key.sign_ssh_data(b'jerri blank') + msg = key.sign_ssh_data(b"jerri blank") msg.rewind() - self.assertTrue(key.verify_ssh_sig(b'jerri blank', msg)) + self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg)) self.assertEqual(key.get_bits(), 256) - self.assertEqual(key.get_name(), 'ecdsa-sha2-nistp256') + self.assertEqual(key.get_name(), "ecdsa-sha2-nistp256") key = ECDSAKey.generate(bits=256) - msg = key.sign_ssh_data(b'jerri blank') + msg = key.sign_ssh_data(b"jerri blank") msg.rewind() - self.assertTrue(key.verify_ssh_sig(b'jerri blank', msg)) + self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg)) self.assertEqual(key.get_bits(), 256) - self.assertEqual(key.get_name(), 'ecdsa-sha2-nistp256') + self.assertEqual(key.get_name(), "ecdsa-sha2-nistp256") key = ECDSAKey.generate(bits=384) - msg = key.sign_ssh_data(b'jerri blank') + msg = key.sign_ssh_data(b"jerri blank") msg.rewind() - self.assertTrue(key.verify_ssh_sig(b'jerri blank', msg)) + self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg)) self.assertEqual(key.get_bits(), 384) - self.assertEqual(key.get_name(), 'ecdsa-sha2-nistp384') + self.assertEqual(key.get_name(), "ecdsa-sha2-nistp384") key = ECDSAKey.generate(bits=521) - msg = key.sign_ssh_data(b'jerri blank') + msg = key.sign_ssh_data(b"jerri blank") msg.rewind() - self.assertTrue(key.verify_ssh_sig(b'jerri blank', msg)) + self.assertTrue(key.verify_ssh_sig(b"jerri blank", msg)) self.assertEqual(key.get_bits(), 521) - self.assertEqual(key.get_name(), 'ecdsa-sha2-nistp521') + self.assertEqual(key.get_name(), "ecdsa-sha2-nistp521") - def test_10_load_ecdsa_256(self): - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_256.key')) - self.assertEqual('ecdsa-sha2-nistp256', key.get_name()) - exp_ecdsa = b(FINGER_ECDSA_256.split()[1].replace(':', '')) + def test_load_ecdsa_256(self): + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_256.key")) + self.assertEqual("ecdsa-sha2-nistp256", key.get_name()) + exp_ecdsa = b(FINGER_ECDSA_256.split()[1].replace(":", "")) my_ecdsa = hexlify(key.get_fingerprint()) self.assertEqual(exp_ecdsa, my_ecdsa) self.assertEqual(PUB_ECDSA_256.split()[1], key.get_base64()) @@ -274,31 +337,33 @@ def test_10_load_ecdsa_256(self): key2 = ECDSAKey.from_private_key(s) self.assertEqual(key, key2) - def test_11_load_ecdsa_password_256(self): - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_password_256.key'), b'television') - self.assertEqual('ecdsa-sha2-nistp256', key.get_name()) - exp_ecdsa = b(FINGER_ECDSA_256.split()[1].replace(':', '')) + def test_load_ecdsa_password_256(self): + key = ECDSAKey.from_private_key_file( + 
_support("test_ecdsa_password_256.key"), b"television" + ) + self.assertEqual("ecdsa-sha2-nistp256", key.get_name()) + exp_ecdsa = b(FINGER_ECDSA_256.split()[1].replace(":", "")) my_ecdsa = hexlify(key.get_fingerprint()) self.assertEqual(exp_ecdsa, my_ecdsa) self.assertEqual(PUB_ECDSA_256.split()[1], key.get_base64()) self.assertEqual(256, key.get_bits()) - def test_12_compare_ecdsa_256(self): + def test_compare_ecdsa_256(self): # verify that the private & public keys compare equal - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_256.key')) + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_256.key")) self.assertEqual(key, key) pub = ECDSAKey(data=key.asbytes()) self.assertTrue(key.can_sign()) self.assertTrue(not pub.can_sign()) self.assertEqual(key, pub) - def test_13_sign_ecdsa_256(self): + def test_sign_ecdsa_256(self): # verify that the rsa private key can sign and verify - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_256.key')) - msg = key.sign_ssh_data(b'ice weasels') + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_256.key")) + msg = key.sign_ssh_data(b"ice weasels") self.assertTrue(type(msg) is Message) msg.rewind() - self.assertEqual('ecdsa-sha2-nistp256', msg.get_text()) + self.assertEqual("ecdsa-sha2-nistp256", msg.get_text()) # ECDSA signatures, like DSS signatures, tend to be different # each time, so we can't compare against a "known correct" # signature. @@ -306,12 +371,12 @@ def test_13_sign_ecdsa_256(self): msg.rewind() pub = ECDSAKey(data=key.asbytes()) - self.assertTrue(pub.verify_ssh_sig(b'ice weasels', msg)) + self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg)) - def test_14_load_ecdsa_384(self): - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_384.key')) - self.assertEqual('ecdsa-sha2-nistp384', key.get_name()) - exp_ecdsa = b(FINGER_ECDSA_384.split()[1].replace(':', '')) + def test_load_ecdsa_384(self): + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_384.key")) + self.assertEqual("ecdsa-sha2-nistp384", key.get_name()) + exp_ecdsa = b(FINGER_ECDSA_384.split()[1].replace(":", "")) my_ecdsa = hexlify(key.get_fingerprint()) self.assertEqual(exp_ecdsa, my_ecdsa) self.assertEqual(PUB_ECDSA_384.split()[1], key.get_base64()) @@ -324,31 +389,44 @@ def test_14_load_ecdsa_384(self): key2 = ECDSAKey.from_private_key(s) self.assertEqual(key, key2) - def test_15_load_ecdsa_password_384(self): - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_password_384.key'), b'television') - self.assertEqual('ecdsa-sha2-nistp384', key.get_name()) - exp_ecdsa = b(FINGER_ECDSA_384.split()[1].replace(':', '')) + def test_load_ecdsa_password_384(self): + key = ECDSAKey.from_private_key_file( + _support("test_ecdsa_password_384.key"), b"television" + ) + self.assertEqual("ecdsa-sha2-nistp384", key.get_name()) + exp_ecdsa = b(FINGER_ECDSA_384.split()[1].replace(":", "")) my_ecdsa = hexlify(key.get_fingerprint()) self.assertEqual(exp_ecdsa, my_ecdsa) self.assertEqual(PUB_ECDSA_384.split()[1], key.get_base64()) self.assertEqual(384, key.get_bits()) - def test_16_compare_ecdsa_384(self): + def test_load_ecdsa_transmutes_crypto_exceptions(self): + path = _support("test_ecdsa_256.key") + # TODO: nix unittest for pytest + for exception in (TypeError("onoz"), UnsupportedAlgorithm("oops")): + with patch( + "paramiko.ecdsakey.serialization.load_der_private_key" + ) as loader: + loader.side_effect = exception + with pytest.raises(SSHException, match=str(exception)): + ECDSAKey.from_private_key_file(path) + + def 
test_compare_ecdsa_384(self): # verify that the private & public keys compare equal - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_384.key')) + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_384.key")) self.assertEqual(key, key) pub = ECDSAKey(data=key.asbytes()) self.assertTrue(key.can_sign()) self.assertTrue(not pub.can_sign()) self.assertEqual(key, pub) - def test_17_sign_ecdsa_384(self): + def test_sign_ecdsa_384(self): # verify that the rsa private key can sign and verify - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_384.key')) - msg = key.sign_ssh_data(b'ice weasels') + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_384.key")) + msg = key.sign_ssh_data(b"ice weasels") self.assertTrue(type(msg) is Message) msg.rewind() - self.assertEqual('ecdsa-sha2-nistp384', msg.get_text()) + self.assertEqual("ecdsa-sha2-nistp384", msg.get_text()) # ECDSA signatures, like DSS signatures, tend to be different # each time, so we can't compare against a "known correct" # signature. @@ -356,12 +434,12 @@ def test_17_sign_ecdsa_384(self): msg.rewind() pub = ECDSAKey(data=key.asbytes()) - self.assertTrue(pub.verify_ssh_sig(b'ice weasels', msg)) + self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg)) - def test_18_load_ecdsa_521(self): - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_521.key')) - self.assertEqual('ecdsa-sha2-nistp521', key.get_name()) - exp_ecdsa = b(FINGER_ECDSA_521.split()[1].replace(':', '')) + def test_load_ecdsa_521(self): + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_521.key")) + self.assertEqual("ecdsa-sha2-nistp521", key.get_name()) + exp_ecdsa = b(FINGER_ECDSA_521.split()[1].replace(":", "")) my_ecdsa = hexlify(key.get_fingerprint()) self.assertEqual(exp_ecdsa, my_ecdsa) self.assertEqual(PUB_ECDSA_521.split()[1], key.get_base64()) @@ -377,31 +455,33 @@ def test_18_load_ecdsa_521(self): key2 = ECDSAKey.from_private_key(s) self.assertEqual(key, key2) - def test_19_load_ecdsa_password_521(self): - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_password_521.key'), b'television') - self.assertEqual('ecdsa-sha2-nistp521', key.get_name()) - exp_ecdsa = b(FINGER_ECDSA_521.split()[1].replace(':', '')) + def test_load_ecdsa_password_521(self): + key = ECDSAKey.from_private_key_file( + _support("test_ecdsa_password_521.key"), b"television" + ) + self.assertEqual("ecdsa-sha2-nistp521", key.get_name()) + exp_ecdsa = b(FINGER_ECDSA_521.split()[1].replace(":", "")) my_ecdsa = hexlify(key.get_fingerprint()) self.assertEqual(exp_ecdsa, my_ecdsa) self.assertEqual(PUB_ECDSA_521.split()[1], key.get_base64()) self.assertEqual(521, key.get_bits()) - def test_20_compare_ecdsa_521(self): + def test_compare_ecdsa_521(self): # verify that the private & public keys compare equal - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_521.key')) + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_521.key")) self.assertEqual(key, key) pub = ECDSAKey(data=key.asbytes()) self.assertTrue(key.can_sign()) self.assertTrue(not pub.can_sign()) self.assertEqual(key, pub) - def test_21_sign_ecdsa_521(self): + def test_sign_ecdsa_521(self): # verify that the rsa private key can sign and verify - key = ECDSAKey.from_private_key_file(test_path('test_ecdsa_521.key')) - msg = key.sign_ssh_data(b'ice weasels') + key = ECDSAKey.from_private_key_file(_support("test_ecdsa_521.key")) + msg = key.sign_ssh_data(b"ice weasels") self.assertTrue(type(msg) is Message) msg.rewind() - self.assertEqual('ecdsa-sha2-nistp521', 
msg.get_text()) + self.assertEqual("ecdsa-sha2-nistp521", msg.get_text()) # ECDSA signatures, like DSS signatures, tend to be different # each time, so we can't compare against a "known correct" # signature. @@ -409,21 +489,266 @@ def test_21_sign_ecdsa_521(self): msg.rewind() pub = ECDSAKey(data=key.asbytes()) - self.assertTrue(pub.verify_ssh_sig(b'ice weasels', msg)) + self.assertTrue(pub.verify_ssh_sig(b"ice weasels", msg)) + + def test_load_openssh_format_RSA_key(self): + key = RSAKey.from_private_key_file( + _support("test_rsa_openssh.key"), b"television" + ) + self.assertEqual("ssh-rsa", key.get_name()) + self.assertEqual(PUB_RSA_2K_OPENSSH.split()[1], key.get_base64()) + self.assertEqual(2048, key.get_bits()) + exp_rsa = b(FINGER_RSA_2K_OPENSSH.split()[1].replace(":", "")) + my_rsa = hexlify(key.get_fingerprint()) + self.assertEqual(exp_rsa, my_rsa) + + def test_loading_openssh_RSA_keys_uses_correct_p_q(self): + # Re #1723 - not the most elegant test but given how deep it is... + with patch( + "paramiko.rsakey.rsa.RSAPrivateNumbers", wraps=RSAPrivateNumbers + ) as spy: + # Load key + RSAKey.from_private_key_file( + _support("test_rsa_openssh.key"), b"television" + ) + # Ensure spy saw the correct P and Q values as derived from + # hardcoded test private key value + kwargs = spy.call_args[1] + assert kwargs["p"] == RSA_2K_OPENSSH_P + assert kwargs["q"] == RSA_2K_OPENSSH_Q + + def test_load_openssh_format_DSS_key(self): + key = DSSKey.from_private_key_file( + _support("test_dss_openssh.key"), b"television" + ) + self.assertEqual("ssh-dss", key.get_name()) + self.assertEqual(PUB_DSS_1K_OPENSSH.split()[1], key.get_base64()) + self.assertEqual(1024, key.get_bits()) + exp_rsa = b(FINGER_DSS_1K_OPENSSH.split()[1].replace(":", "")) + my_rsa = hexlify(key.get_fingerprint()) + self.assertEqual(exp_rsa, my_rsa) + + def test_load_openssh_format_EC_key(self): + key = ECDSAKey.from_private_key_file( + _support("test_ecdsa_384_openssh.key"), b"television" + ) + self.assertEqual("ecdsa-sha2-nistp384", key.get_name()) + self.assertEqual(PUB_EC_384_OPENSSH.split()[1], key.get_base64()) + self.assertEqual(384, key.get_bits()) + exp_fp = b(FINGER_EC_384_OPENSSH.split()[1].replace(":", "")) + my_fp = hexlify(key.get_fingerprint()) + self.assertEqual(exp_fp, my_fp) def test_salt_size(self): # Read an existing encrypted private key - file_ = test_path('test_rsa_password.key') - password = 'television' - newfile = file_ + '.new' - newpassword = 'radio' + file_ = _support("test_rsa_password.key") + password = "television" + newfile = file_ + ".new" + newpassword = "radio" key = RSAKey(filename=file_, password=password) # Write out a newly re-encrypted copy with a new password. # When the bug under test exists, this will ValueError. 
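        # (Editor's note: the write/re-read round trip below, together with the
        # assert_keyfile_is_encrypted() helper defined earlier, is what catches
        # a regression -- the rewritten copy must still carry PEM encryption
        # headers and decrypt back to an identical key.)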
try: key.write_private_key_file(newfile, password=newpassword) + self.assert_keyfile_is_encrypted(newfile) # Verify the inner key data still matches (when no ValueError) key2 = RSAKey(filename=newfile, password=newpassword) self.assertEqual(key, key2) finally: os.remove(newfile) + + def test_load_openssh_format_RSA_nopad(self): + # check just not exploding with 'Invalid key' + RSAKey.from_private_key_file(_support("test_rsa_openssh_nopad.key")) + + def test_stringification(self): + key = RSAKey.from_private_key_file(_support("test_rsa.key")) + comparable = TEST_KEY_BYTESTR_2 if PY2 else TEST_KEY_BYTESTR_3 + self.assertEqual(str(key), comparable) + + def test_ed25519(self): + key1 = Ed25519Key.from_private_key_file(_support("test_ed25519.key")) + key2 = Ed25519Key.from_private_key_file( + _support("test_ed25519_password.key"), b"abc123" + ) + self.assertNotEqual(key1.asbytes(), key2.asbytes()) + + def test_ed25519_funky_padding(self): + # Proves #1306 by just not exploding with 'Invalid key'. + Ed25519Key.from_private_key_file( + _support("test_ed25519-funky-padding.key") + ) + + def test_ed25519_funky_padding_with_passphrase(self): + # Proves #1306 by just not exploding with 'Invalid key'. + Ed25519Key.from_private_key_file( + _support("test_ed25519-funky-padding_password.key"), b"asdf" + ) + + def test_ed25519_compare(self): + # verify that the private & public keys compare equal + key = Ed25519Key.from_private_key_file(_support("test_ed25519.key")) + self.assertEqual(key, key) + pub = Ed25519Key(data=key.asbytes()) + self.assertTrue(key.can_sign()) + self.assertTrue(not pub.can_sign()) + self.assertEqual(key, pub) + + # No point testing on systems that never exhibited the bug originally + @pytest.mark.skipif( + not is_low_entropy(), reason="Not a low-entropy system" + ) + def test_ed25519_32bit_collision(self): + # Re: 2021.10.19 security report email: two different private keys + # which Paramiko compared as equal on low-entropy platforms. + original = Ed25519Key.from_private_key_file( + _support("badhash_key1.ed25519.key") + ) + generated = Ed25519Key.from_private_key_file( + _support("badhash_key2.ed25519.key") + ) + assert original != generated + + def keys(self): + for key_class, filename in [ + (RSAKey, "test_rsa.key"), + (DSSKey, "test_dss.key"), + (ECDSAKey, "test_ecdsa_256.key"), + (Ed25519Key, "test_ed25519.key"), + ]: + key1 = key_class.from_private_key_file(_support(filename)) + key2 = key_class.from_private_key_file(_support(filename)) + yield key1, key2 + + def test_keys_are_comparable(self): + for key1, key2 in self.keys(): + assert key1 == key2 + + def test_keys_are_hashable(self): + # NOTE: this isn't a great test due to hashseed randomization under + # Python 3 preventing use of static values, but it does still prove + # that __hash__ is implemented/doesn't explode & works across instances + for key1, key2 in self.keys(): + assert hash(key1) == hash(key2) + + def test_ed25519_nonbytes_password(self): + # https://github.com/paramiko/paramiko/issues/1039 + Ed25519Key.from_private_key_file( + _support("test_ed25519_password.key"), + # NOTE: not a bytes. Amusingly, the test above for same key DOES + # explicitly cast to bytes...code smell! + "abc123", + ) + # No exception -> it's good. Meh. 
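    # Editor's aside, illustrative only (not part of this diff): the test above
    # hands the loader a str passphrase while its sibling explicitly casts to
    # bytes first. If the key loaders normalized the passphrase themselves, a
    # minimal sketch of that normalization -- a hypothetical helper, not a real
    # paramiko API -- might look like this:
    @staticmethod
    def _coerce_passphrase_to_bytes(passphrase):
        # Accept str or bytes and always hand bytes onward; assumes UTF-8 is an
        # acceptable encoding for textual passphrases.
        if passphrase is not None and not isinstance(passphrase, bytes):
            passphrase = passphrase.encode("utf-8")
        return passphrase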
+ + def test_ed25519_load_from_file_obj(self): + with open(_support("test_ed25519.key")) as pkey_fileobj: + key = Ed25519Key.from_private_key(pkey_fileobj) + self.assertEqual(key, key) + self.assertTrue(key.can_sign()) + + def test_keyfile_is_actually_encrypted(self): + # Read an existing encrypted private key + file_ = _support("test_rsa_password.key") + password = "television" + newfile = file_ + ".new" + newpassword = "radio" + key = RSAKey(filename=file_, password=password) + # Write out a newly re-encrypted copy with a new password. + # When the bug under test exists, this will ValueError. + try: + key.write_private_key_file(newfile, password=newpassword) + self.assert_keyfile_is_encrypted(newfile) + finally: + os.remove(newfile) + + def test_certificates(self): + # NOTE: we also test 'live' use of cert auth for all key types in + # test_client.py; this and nearby cert tests are more about the gritty + # details. + # PKey.load_certificate + key_path = _support(os.path.join("cert_support", "test_rsa.key")) + key = RSAKey.from_private_key_file(key_path) + self.assertTrue(key.public_blob is None) + cert_path = _support( + os.path.join("cert_support", "test_rsa.key-cert.pub") + ) + key.load_certificate(cert_path) + self.assertTrue(key.public_blob is not None) + self.assertEqual( + key.public_blob.key_type, "ssh-rsa-cert-v01@openssh.com" + ) + self.assertEqual(key.public_blob.comment, "test_rsa.key.pub") + # Delve into blob contents, for test purposes + msg = Message(key.public_blob.key_blob) + self.assertEqual(msg.get_text(), "ssh-rsa-cert-v01@openssh.com") + msg.get_string() + e = msg.get_mpint() + n = msg.get_mpint() + self.assertEqual(e, key.public_numbers.e) + self.assertEqual(n, key.public_numbers.n) + # Serial number + self.assertEqual(msg.get_int64(), 1234) + + # Prevented from loading certificate that doesn't match + key_path = _support(os.path.join("cert_support", "test_ed25519.key")) + key1 = Ed25519Key.from_private_key_file(key_path) + self.assertRaises( + ValueError, + key1.load_certificate, + _support("test_rsa.key-cert.pub"), + ) + + @patch("paramiko.pkey.os") + def _test_keyfile_race(self, os_, exists): + # Re: CVE-2022-24302 + password = "television" + newpassword = "radio" + source = _support("test_ecdsa_384.key") + new = source + ".new" + # Mock setup + os_.path.exists.return_value = exists + # Attach os flag values to mock + for attr, value in vars(os).items(): + if attr.startswith("O_"): + setattr(os_, attr, value) + # Load fixture key + key = ECDSAKey(filename=source, password=password) + key._write_private_key = Mock() + # Write out in new location + key.write_private_key_file(new, password=newpassword) + # Expected open via os module + os_.open.assert_called_once_with(new, flags=os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode=o600) + os_.fdopen.assert_called_once_with(os_.open.return_value, mode="w") + # Old chmod still around for backwards compat + os_.chmod.assert_called_once_with(new, o600) + assert ( + key._write_private_key.call_args[0][0] + == os_.fdopen.return_value.__enter__.return_value + ) + + def test_new_keyfiles_avoid_file_descriptor_race_on_chmod(self): + self._test_keyfile_race(exists=False) + + def test_existing_keyfiles_still_work_ok(self): + self._test_keyfile_race(exists=True) + + def test_new_keyfiles_avoid_descriptor_race_integration(self): + # Integration-style version of above + password = "television" + newpassword = "radio" + source = _support("test_ecdsa_384.key") + new = source + ".new" + # Load fixture key + key = ECDSAKey(filename=source, 
password=password) + try: + # Write out in new location + key.write_private_key_file(new, password=newpassword) + # Test mode + assert stat.S_IMODE(os.stat(new).st_mode) == o600 + # Prove can open with new password + reloaded = ECDSAKey(filename=new, password=newpassword) + assert reloaded == key + finally: + if os.path.exists(new): + os.unlink(new) diff --git a/tests/test_proxy.py b/tests/test_proxy.py new file mode 100644 index 000000000..2e0b0e514 --- /dev/null +++ b/tests/test_proxy.py @@ -0,0 +1,144 @@ +import signal +import socket + +from mock import patch +from pytest import raises + +from paramiko import ProxyCommand, ProxyCommandFailure + + +class TestProxyCommand(object): + @patch("paramiko.proxy.subprocess") + def test_init_takes_command_string(self, subprocess): + ProxyCommand(command_line="do a thing") + subprocess.Popen.assert_called_once_with( + ["do", "a", "thing"], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=0, + ) + + @patch("paramiko.proxy.subprocess.Popen") + def test_send_writes_to_process_stdin_returning_length(self, Popen): + proxy = ProxyCommand("hi") + written = proxy.send(b"data") + Popen.return_value.stdin.write.assert_called_once_with(b"data") + assert written == len(b"data") + + @patch("paramiko.proxy.subprocess.Popen") + def test_send_raises_ProxyCommandFailure_on_error(self, Popen): + Popen.return_value.stdin.write.side_effect = IOError(0, "whoops") + with raises(ProxyCommandFailure) as info: + ProxyCommand("hi").send("data") + assert info.value.command == "hi" + assert info.value.error == "whoops" + + @patch("paramiko.proxy.subprocess.Popen") + @patch("paramiko.proxy.os.read") + @patch("paramiko.proxy.select") + def test_recv_reads_from_process_stdout_returning_bytes( + self, select, os_read, Popen + ): + stdout = Popen.return_value.stdout + select.return_value = [stdout], None, None + fileno = stdout.fileno.return_value + # Intentionally returning <5 at a time sometimes + os_read.side_effect = [b"was", b"te", b"of ti", b"me"] + proxy = ProxyCommand("hi") + data = proxy.recv(5) + assert data == b"waste" + assert [x[0] for x in os_read.call_args_list] == [ + (fileno, 5), + (fileno, 2), + ] + + @patch("paramiko.proxy.subprocess.Popen") + @patch("paramiko.proxy.os.read") + @patch("paramiko.proxy.select") + def test_recv_returns_buffer_on_timeout_if_any_read( + self, select, os_read, Popen + ): + stdout = Popen.return_value.stdout + select.return_value = [stdout], None, None + fileno = stdout.fileno.return_value + os_read.side_effect = [b"was", socket.timeout] + proxy = ProxyCommand("hi") + data = proxy.recv(5) + assert data == b"was" # not b"waste" + assert os_read.call_args[0] == (fileno, 2) + + @patch("paramiko.proxy.subprocess.Popen") + @patch("paramiko.proxy.os.read") + @patch("paramiko.proxy.select") + def test_recv_raises_timeout_if_nothing_read(self, select, os_read, Popen): + stdout = Popen.return_value.stdout + select.return_value = [stdout], None, None + fileno = stdout.fileno.return_value + os_read.side_effect = socket.timeout + proxy = ProxyCommand("hi") + with raises(socket.timeout): + proxy.recv(5) + assert os_read.call_args[0] == (fileno, 5) + + @patch("paramiko.proxy.subprocess.Popen") + @patch("paramiko.proxy.os.read") + @patch("paramiko.proxy.select") + def test_recv_raises_ProxyCommandFailure_on_non_timeout_error( + self, select, os_read, Popen + ): + select.return_value = [Popen.return_value.stdout], None, None + os_read.side_effect = IOError(0, "whoops") + with raises(ProxyCommandFailure) as info: 
+ ProxyCommand("hi").recv(5) + assert info.value.command == "hi" + assert info.value.error == "whoops" + + @patch("paramiko.proxy.subprocess.Popen") + @patch("paramiko.proxy.os.kill") + def test_close_kills_subprocess(self, os_kill, Popen): + proxy = ProxyCommand("hi") + proxy.close() + os_kill.assert_called_once_with(Popen.return_value.pid, signal.SIGTERM) + + @patch("paramiko.proxy.subprocess.Popen") + def test_closed_exposes_whether_subprocess_has_exited(self, Popen): + proxy = ProxyCommand("hi") + Popen.return_value.returncode = None + assert proxy.closed is False + assert proxy._closed is False + Popen.return_value.returncode = 0 + assert proxy.closed is True + assert proxy._closed is True + + @patch("paramiko.proxy.time.time") + @patch("paramiko.proxy.subprocess.Popen") + @patch("paramiko.proxy.os.read") + @patch("paramiko.proxy.select") + def test_timeout_affects_whether_timeout_is_raised( + self, select, os_read, Popen, time + ): + stdout = Popen.return_value.stdout + select.return_value = [stdout], None, None + # Base case: None timeout means no timing out + os_read.return_value = b"meh" + proxy = ProxyCommand("yello") + assert proxy.timeout is None + # Implicit 'no raise' check + assert proxy.recv(3) == b"meh" + # Use settimeout to set timeout, and it is honored + time.side_effect = [0, 10] # elapsed > 7 + proxy = ProxyCommand("ohnoz") + proxy.settimeout(7) + assert proxy.timeout == 7 + with raises(socket.timeout): + proxy.recv(3) + + @patch("paramiko.proxy.subprocess", new=None) + @patch("paramiko.proxy.subprocess_import_error", new=ImportError("meh")) + def test_raises_subprocess_ImportErrors_at_runtime(self): + # Not an ideal test, but I don't know of a non-bad way to fake out + # module-time ImportErrors. So we mock the symptoms. Meh! 
+ with raises(ImportError) as info: + ProxyCommand("hi!!!") + assert str(info.value) == "meh" diff --git a/tests/test_rsa.key.pub b/tests/test_rsa.key.pub new file mode 100644 index 000000000..bfa1e150b --- /dev/null +++ b/tests/test_rsa.key.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA049W6geFpmsljTwfvI1UmKWWJPNFI74+vNKTk4dmzkQY2yAMs6FhlvhlI8ysU4oj71ZsRYMecHbBbxdN79+JRFVYTKaLqjwGENeTd+yv4q+V2PvZv3fLnzApI3l7EJCqhWwJUHJ1jAkZzqDx0tyOL4uoZpww3nmE0kb3y21tH4c= diff --git a/tests/test_rsa_openssh.key b/tests/test_rsa_openssh.key new file mode 100644 index 000000000..6077c1032 --- /dev/null +++ b/tests/test_rsa_openssh.key @@ -0,0 +1,28 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAACmFlczI1Ni1jYmMAAAAGYmNyeXB0AAAAGAAAABD0R3hOFS +FMb2SJeo5h8QPNAAAAEAAAAAEAAAEXAAAAB3NzaC1yc2EAAAADAQABAAABAQDF+Dpr54DX +0WdeTDpNAMdkCWEkl3OXtNgf58qlN1gX572OLBqLf0zT4bHstUEpU3piazph/rSWcUMuBo +D46tZ6jiH7H9b9Pem2eYQWaELDDkM+v9BMbEy5rMbFRLol5OtEvPFqneyEAanPOgvd8t3y +yhSev9QVusakzJ8j8LGgrA8huYZ+Srnw0shEWLG70KUKCh3rG0QIvA8nfhtUOisr2Gp+F0 +YxMGb5gwBlQYAYE5l6u1SjZ7hNjyNosjK+wRBFgFFBYVpkZKJgWoK9w4ijFyzMZTucnZMq +KOKAjIJvHfKBf2/cEfYxSq1EndqTqjYsd9T7/s2vcn1OH5a0wkERAAAD0JnzCJYfDeiUQ6 +9LOAb6/NnhKvFjCdBYal60MfLcLBHvzHLJvTneQ4f1Vknq8xEVmRba7SDSfwaEybP/1FsP +SGH6FNKA5gKllemgmcaUVr3wtNPtjX4WgsyHcwCRgHmOiyNrUj0OZR5wbZabHIIyirl4wa +LBz8Jb3GalKEagtyWsBKDCKHCFNzh8xmsT1SWhnC7baRyC8e3krQm9hGbNhpj6Q5AtN3ql +wBVamUp0eKxkt70mKBKI4v3DR8KqrEndeK6d0cegVEkE67fqa99a5J3uSDC8mglKrHiKEs +dU1dh/bOF/H3aFpINlRwvlZ95Opby7rG0BHgbZONq0+VUnABVzNTM5Xd5UKjjCF28CrQBf +XS6WeHeUx2zHtOmL1xdePk+Bii+SSUl3pLa4SDwX4nV95cSPx8vMm8dJEruxad6+MPoSuy +Oyho89jqUTSgC/RPejuTgrnB3WbzE5SJb+V3zMata0J1wxbNfYKG9U+VucUZhP4+jzfNqH +B/v8JqtuxnqR8NjPsK2+8wJxebL2KVNjKOm//6P3KSDsavpscGpVWOM06zUlwWCB26W3pP +X/+xO9aR5wiBteFKoJG1waziIjqhOJSmvq+I/texUKEUd/eEFNt10Ubc0zy0sRYVN8rIRJ +masQzCYuUylDzCa4ar1s4qngBZzWL2PRkPuXuhoHuT0J5no174GR6+6EAYZZhnq0tkYrhZ +Ar0tQ4CDlI235a3MPHzvABuwYuWys1tBuLAb+6Gc6CmCiQ+mhojfQUBYG5T65iRFA5UQsH +O1RLEC3yasxGcBI6d0J/fwOP/YLktNu3AeUumr0N9Xgf02DlBNwd+4GOI0LcQvl/3J8ppo +bamTppKPEZ2d32VNEO+Z6Zx5DlIVm5gDeMvIvdwap445VnhL3ZZH2NCkAcXM9+0WH+Quas +JCAMgPYiP9FzF+8Onmj2OmhgIVj/9eanhS3/GLrRC4xCvER2V7PwgB0I5qY110BPEttDyo +IvYE51kvtdW447SK7HZywJnkyw2RNm+29dvWJJwSQckUHuZkXEtmEPk0ePL3yf2NH5XYJc +pXX6Zac0KemCPIHr8l7GogE4Rb2BBTqddkegb9piz6QTAPcQnn+GuMFG06IBhUrgcMEQ8x +UOXYUUrT5HvSxWUcgaYH1nfC3bTWmDaodw8/HQKyF6c44rujO2s2NLFOCAyQMUNdhh3lfD +yHYLO7xYkP6xzzkpk+2lwBoeYdQdAwlKN/XqC8ZhBfwTdem/1hh1BpQJFbbFftWxU8gxxi +iuI+vmlsuIsxKoGCq8YXuophx62lo= +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/test_rsa_openssh_nopad.key b/tests/test_rsa_openssh_nopad.key new file mode 100644 index 000000000..61ac1b199 --- /dev/null +++ b/tests/test_rsa_openssh_nopad.key @@ -0,0 +1,27 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn +NhAAAAAwEAAQAAAQEAnyMwWSwrbJxxQZWMJO5xR6eAA9De4t3GViqDRaQt/BgsvzZ14SUz +aOL/A370fKxhx/JLIOOGA0o5B0/ct+CL7XFqMi5r5+iA9VcIeYKKtoAkrEvRnagNW0WVWx +thTnE01g8Pb7fDqzI2cBuBNZ2vGNm2m4UTGC8/kl/0ES1V3KqA7lPlTrkTYg9L/ornvVHc +c8gEbMwx9XXVRzbWiuDE176ojrudY9CZduVSOgW+HK3rKkqLBs/91jv0zUK0oqTQBLR7E2 +V2GWPDU4BjlHTtYr0jpKOGDr1DLu4+NiD/mX+tGMdH6ehbDii0kXmOUaZjs4OxuK3XA/gi +KZLdj1jQQwAAA7iNnvAVjZ7wFQAAAAdzc2gtcnNhAAABAQCfIzBZLCtsnHFBlYwk7nFHp4 +AD0N7i3cZWKoNFpC38GCy/NnXhJTNo4v8DfvR8rGHH8ksg44YDSjkHT9y34IvtcWoyLmvn +6ID1Vwh5goq2gCSsS9GdqA1bRZVbG2FOcTTWDw9vt8OrMjZwG4E1na8Y2babhRMYLz+SX/ +QRLVXcqoDuU+VOuRNiD0v+iue9UdxzyARszDH1ddVHNtaK4MTXvqiOu51j0Jl25VI6Bb4c 
+resqSosGz/3WO/TNQrSipNAEtHsTZXYZY8NTgGOUdO1ivSOko4YOvUMu7j42IP+Zf60Yx0 +fp6FsOKLSReY5RpmOzg7G4rdcD+CIpkt2PWNBDAAAAAwEAAQAAAQEAnmMbn+VCYxth7fC2 +R5u6y6J+201sSUiKOwCdHxdFXX+CKd4+fRPVkzM6tXQKSnwX5jXVaKqLm4KoOArYl3q6Sl +1zYParF2plz8oL+URgYzwvQ/1CaDP29zzOZptdwgESoWrj5kF0UlPrsrDtbTvAJm+qPCe6 +1XtRPpKaDO6eYr0PM2QTElZy3mDBUBvu816LdG/ZtnB9g5UsocT5mmhpHTHdjrpwNu5TBe +ACVodDn5Fu66OlrrnQi4IPCAWKJ1YuzEkZqLhs1L3oMHACsmzrLjzW74SjY4kWTTvGiC6i +tDoycycThk9EGLGNso99Q1fe84/OZUff7aI3yK9KvLL7oQAAAIEAh2+XrJXSBx/v9E3aJH +ncgQH1snXr7LcSRqcWicHdbm8JsOTT3TkyXHGlSZ2rr/Y0u5V1ZSO6roJLrAHsDJzx0x0U +xE/5mpzhD+yIKQwnWkZFLzYEnYDFdXDMzmghUIik9AW7n9dtS8UtVFGaL6Vs2YCOuLqeT9 +nZUkm3UUZ+7QIAAACBAM23DFjQ0/Op2ri7fJA2qFBdXqoJdNHuyYEIrKbB6XaaSUz52+IB +MbccxEz3vPsHh69tZoJ+xZNbFJe9wdmbF+DQpoukHkJnzpk/pUq8LjQMzZfwv41X8zqaq4 +AOA7g27Rk8aKewhCXjhkr0hHEaSiuqIIindFaFti5sQMi2mtkXAAAAgQDGCXkpuKZK61p9 +L6G5yZSQBCgVtm0iQEbyDXWHjy/GqLtxJjqdyaRK57hXGjbzgJJraSy+sNP9uv2QOvyZvB +3XaPWwUYVQ34WyibCqqUaPiHxX7T1lZV+asbwgbmSqYtH5dUEJ8zT572mCwxnRjX63PwDo +5vBbR/qAW5lvRYsltQAAAAFh +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/test_sftp.py b/tests/test_sftp.py old mode 100755 new mode 100644 index e4c2c3a35..6134d070d --- a/tests/test_sftp.py +++ b/tests/test_sftp.py @@ -26,22 +26,22 @@ import os import socket import sys -import threading -import unittest import warnings from binascii import hexlify from tempfile import mkstemp -import paramiko +import pytest + from paramiko.py3compat import PY2, b, u, StringIO from paramiko.common import o777, o600, o666, o644 -from tests.stub_sftp import StubServer, StubSFTPServer -from tests.loop import LoopSocket -from tests.util import test_path -import paramiko.util +from tests import requireNonAsciiLocale from paramiko.sftp_attr import SFTPAttributes -ARTICLE = ''' +from .util import needs_builtin +from .util import slow + + +ARTICLE = """ Insulin sensitivity and liver insulin receptor structure in ducks from two genera @@ -66,362 +66,310 @@ structure for liver insulin receptors and a clear difference in the number of liver insulin receptors. Their sensitivity to insulin is, however, similarly decreased compared with chicken. 
-''' +""" # Here is how unicode characters are encoded over 1 to 6 bytes in utf-8 -# U-00000000 - U-0000007F: 0xxxxxxx -# U-00000080 - U-000007FF: 110xxxxx 10xxxxxx -# U-00000800 - U-0000FFFF: 1110xxxx 10xxxxxx 10xxxxxx -# U-00010000 - U-001FFFFF: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx -# U-00200000 - U-03FFFFFF: 111110xx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx -# U-04000000 - U-7FFFFFFF: 1111110x 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx +# U-00000000 - U-0000007F: +# 0xxxxxxx +# U-00000080 - U-000007FF: +# 110xxxxx 10xxxxxx +# U-00000800 - U-0000FFFF: +# 1110xxxx 10xxxxxx 10xxxxxx +# U-00010000 - U-001FFFFF: +# 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx +# U-00200000 - U-03FFFFFF: +# 111110xx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx +# U-04000000 - U-7FFFFFFF: +# 1111110x 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx # Note that: hex(int('11000011',2)) == '0xc3' -# Thus, the following 2-bytes sequence is not valid utf8: "invalid continuation byte" -NON_UTF8_DATA = b'\xC3\xC3' - -FOLDER = os.environ.get('TEST_FOLDER', 'temp-testing000') - -sftp = None -tc = None -g_big_file_test = True -# we need to use eval(compile()) here because Py3.2 doesn't support the 'u' marker for unicode -# this test is the only line in the entire program that has to be treated specially to support Py3.2 -unicode_folder = eval(compile(r"u'\u00fcnic\u00f8de'" if PY2 else r"'\u00fcnic\u00f8de'", 'test_sftp.py', 'eval')) -utf8_folder = b'/\xc3\xbcnic\xc3\xb8\x64\x65' - - -def get_sftp(): - global sftp - return sftp - +# Thus, the following 2-bytes sequence is not valid utf8: "invalid continuation +# byte" +NON_UTF8_DATA = b"\xC3\xC3" -class SFTPTest (unittest.TestCase): - @staticmethod - def init(hostname, username, keyfile, passwd): - global sftp, tc +unicode_folder = u"\u00fcnic\u00f8de" if PY2 else "\u00fcnic\u00f8de" +utf8_folder = b"/\xc3\xbcnic\xc3\xb8\x64\x65" - t = paramiko.Transport(hostname) - tc = t - try: - key = paramiko.RSAKey.from_private_key_file(keyfile, passwd) - except paramiko.PasswordRequiredException: - sys.stderr.write('\n\nparamiko.RSAKey.from_private_key_file REQUIRES PASSWORD.\n') - sys.stderr.write('You have two options:\n') - sys.stderr.write('* Use the "-K" option to point to a different (non-password-protected)\n') - sys.stderr.write(' private key file.\n') - sys.stderr.write('* Use the "-P" option to provide the password needed to unlock this private\n') - sys.stderr.write(' key.\n') - sys.stderr.write('\n') - sys.exit(1) - try: - t.connect(username=username, pkey=key) - except paramiko.SSHException: - t.close() - sys.stderr.write('\n\nparamiko.Transport.connect FAILED.\n') - sys.stderr.write('There are several possible reasons why it might fail so quickly:\n\n') - sys.stderr.write('* The host to connect to (%s) is not a valid SSH server.\n' % hostname) - sys.stderr.write(' (Use the "-H" option to change the host.)\n') - sys.stderr.write('* The username to auth as (%s) is invalid.\n' % username) - sys.stderr.write(' (Use the "-U" option to change the username.)\n') - sys.stderr.write('* The private key given (%s) is not accepted by the server.\n' % keyfile) - sys.stderr.write(' (Use the "-K" option to provide a different key file.)\n') - sys.stderr.write('\n') - sys.exit(1) - sftp = paramiko.SFTP.from_transport(t) - - @staticmethod - def init_loopback(): - global sftp, tc - - socks = LoopSocket() - sockc = LoopSocket() - sockc.link(socks) - tc = paramiko.Transport(sockc) - ts = paramiko.Transport(socks) - - host_key = paramiko.RSAKey.from_private_key_file(test_path('test_rsa.key')) - 
ts.add_server_key(host_key) - event = threading.Event() - server = StubServer() - ts.set_subsystem_handler('sftp', paramiko.SFTPServer, StubSFTPServer) - ts.start_server(event, server) - tc.connect(username='slowdive', password='pygmalion') - event.wait(1.0) - - sftp = paramiko.SFTP.from_transport(tc) - - @staticmethod - def set_big_file_test(onoff): - global g_big_file_test - g_big_file_test = onoff - - def setUp(self): - global FOLDER - for i in range(1000): - FOLDER = FOLDER[:-3] + '%03d' % i - try: - sftp.mkdir(FOLDER) - break - except (IOError, OSError): - pass - - def tearDown(self): - #sftp.chdir() - sftp.rmdir(FOLDER) - def test_1_file(self): +@slow +class TestSFTP(object): + def test_file(self, sftp): """ verify that we can create a file. """ - f = sftp.open(FOLDER + '/test', 'w') + f = sftp.open(sftp.FOLDER + "/test", "w") try: - self.assertEqual(f.stat().st_size, 0) + assert f.stat().st_size == 0 finally: f.close() - sftp.remove(FOLDER + '/test') + sftp.remove(sftp.FOLDER + "/test") - def test_2_close(self): + def test_close(self, sftp): """ - verify that closing the sftp session doesn't do anything bad, and that - a new one can be opened. + Verify that SFTP session close() causes a socket error on next action. """ - global sftp sftp.close() - try: - sftp.open(FOLDER + '/test2', 'w') - self.fail('expected exception') - except: - pass - sftp = paramiko.SFTP.from_transport(tc) + with pytest.raises(socket.error, match="Socket is closed"): + sftp.open(sftp.FOLDER + "/test2", "w") - def test_2_sftp_can_be_used_as_context_manager(self): + def test_sftp_can_be_used_as_context_manager(self, sftp): """ verify that the sftp session is closed when exiting the context manager """ - global sftp with sftp: pass - try: - sftp.open(FOLDER + '/test2', 'w') - self.fail('expected exception') - except (EOFError, socket.error): - pass - finally: - sftp = paramiko.SFTP.from_transport(tc) + with pytest.raises(socket.error, match="Socket is closed"): + sftp.open(sftp.FOLDER + "/test2", "w") - def test_3_write(self): + def test_write(self, sftp): """ verify that a file can be created and written, and the size is correct. """ try: - with sftp.open(FOLDER + '/duck.txt', 'w') as f: + with sftp.open(sftp.FOLDER + "/duck.txt", "w") as f: f.write(ARTICLE) - self.assertEqual(sftp.stat(FOLDER + '/duck.txt').st_size, 1483) + assert sftp.stat(sftp.FOLDER + "/duck.txt").st_size == 1483 finally: - sftp.remove(FOLDER + '/duck.txt') + sftp.remove(sftp.FOLDER + "/duck.txt") - def test_3_sftp_file_can_be_used_as_context_manager(self): + def test_sftp_file_can_be_used_as_context_manager(self, sftp): """ verify that an opened file can be used as a context manager """ try: - with sftp.open(FOLDER + '/duck.txt', 'w') as f: + with sftp.open(sftp.FOLDER + "/duck.txt", "w") as f: f.write(ARTICLE) - self.assertEqual(sftp.stat(FOLDER + '/duck.txt').st_size, 1483) + assert sftp.stat(sftp.FOLDER + "/duck.txt").st_size == 1483 finally: - sftp.remove(FOLDER + '/duck.txt') + sftp.remove(sftp.FOLDER + "/duck.txt") - def test_4_append(self): + def test_append(self, sftp): """ verify that a file can be opened for append, and tell() still works. 
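The `sftp` argument these tests now take appears to be supplied by a test fixture; in ordinary use the equivalent object is an SFTPClient built from a connected Transport. A minimal sketch covering the write/append behaviour checked above, with placeholder host, credentials, and paths:

import paramiko

transport = paramiko.Transport(("sftp.example.com", 22))
transport.connect(username="user", password="secret")
sftp = paramiko.SFTPClient.from_transport(transport)
try:
    with sftp.open("duck.txt", "w") as f:
        f.write("first line\n")
    with sftp.open("duck.txt", "a+") as f:
        f.write("second line\n")
        print(f.tell(), f.stat().st_size)   # both reflect the appended data
finally:
    sftp.close()
    transport.close()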
""" try: - with sftp.open(FOLDER + '/append.txt', 'w') as f: - f.write('first line\nsecond line\n') - self.assertEqual(f.tell(), 23) - - with sftp.open(FOLDER + '/append.txt', 'a+') as f: - f.write('third line!!!\n') - self.assertEqual(f.tell(), 37) - self.assertEqual(f.stat().st_size, 37) + with sftp.open(sftp.FOLDER + "/append.txt", "w") as f: + f.write("first line\nsecond line\n") + assert f.tell() == 23 + + with sftp.open(sftp.FOLDER + "/append.txt", "a+") as f: + f.write("third line!!!\n") + assert f.tell() == 37 + assert f.stat().st_size == 37 f.seek(-26, f.SEEK_CUR) - self.assertEqual(f.readline(), 'second line\n') + assert f.readline() == "second line\n" finally: - sftp.remove(FOLDER + '/append.txt') + sftp.remove(sftp.FOLDER + "/append.txt") - def test_5_rename(self): + def test_rename(self, sftp): """ verify that renaming a file works. """ try: - with sftp.open(FOLDER + '/first.txt', 'w') as f: - f.write('content!\n') - sftp.rename(FOLDER + '/first.txt', FOLDER + '/second.txt') + with sftp.open(sftp.FOLDER + "/first.txt", "w") as f: + f.write("content!\n") + sftp.rename( + sftp.FOLDER + "/first.txt", sftp.FOLDER + "/second.txt" + ) + with pytest.raises(IOError, match="No such file"): + sftp.open(sftp.FOLDER + "/first.txt", "r") + with sftp.open(sftp.FOLDER + "/second.txt", "r") as f: + f.seek(-6, f.SEEK_END) + assert u(f.read(4)) == "tent" + finally: + # TODO: this is gross, make some sort of 'remove if possible' / 'rm + # -f' a-like, jeez try: - sftp.open(FOLDER + '/first.txt', 'r') - self.assertTrue(False, 'no exception on reading nonexistent file') - except IOError: + sftp.remove(sftp.FOLDER + "/first.txt") + except: pass - with sftp.open(FOLDER + '/second.txt', 'r') as f: - f.seek(-6, f.SEEK_END) - self.assertEqual(u(f.read(4)), 'tent') + try: + sftp.remove(sftp.FOLDER + "/second.txt") + except: + pass + + def testa_posix_rename(self, sftp): + """Test posix-rename@openssh.com protocol extension.""" + try: + # first check that the normal rename works as specified + with sftp.open(sftp.FOLDER + "/a", "w") as f: + f.write("one") + sftp.rename(sftp.FOLDER + "/a", sftp.FOLDER + "/b") + with sftp.open(sftp.FOLDER + "/a", "w") as f: + f.write("two") + with pytest.raises(IOError): # actual message seems generic + sftp.rename(sftp.FOLDER + "/a", sftp.FOLDER + "/b") + + # now check with the posix_rename + sftp.posix_rename(sftp.FOLDER + "/a", sftp.FOLDER + "/b") + with sftp.open(sftp.FOLDER + "/b", "r") as f: + data = u(f.read()) + err = "Contents of renamed file not the same as original file" + assert "two" == data, err + finally: try: - sftp.remove(FOLDER + '/first.txt') + sftp.remove(sftp.FOLDER + "/a") except: pass try: - sftp.remove(FOLDER + '/second.txt') + sftp.remove(sftp.FOLDER + "/b") except: pass - def test_6_folder(self): + def test_folder(self, sftp): """ create a temporary folder, verify that we can create a file in it, then remove the folder and verify that we can't create a file in it anymore. 
""" - sftp.mkdir(FOLDER + '/subfolder') - sftp.open(FOLDER + '/subfolder/test', 'w').close() - sftp.remove(FOLDER + '/subfolder/test') - sftp.rmdir(FOLDER + '/subfolder') - try: - sftp.open(FOLDER + '/subfolder/test') - # shouldn't be able to create that file - self.assertTrue(False, 'no exception at dummy file creation') - except IOError: - pass + sftp.mkdir(sftp.FOLDER + "/subfolder") + sftp.open(sftp.FOLDER + "/subfolder/test", "w").close() + sftp.remove(sftp.FOLDER + "/subfolder/test") + sftp.rmdir(sftp.FOLDER + "/subfolder") + # shouldn't be able to create that file if dir removed + with pytest.raises(IOError, match="No such file"): + sftp.open(sftp.FOLDER + "/subfolder/test") - def test_7_listdir(self): + def test_listdir(self, sftp): """ verify that a folder can be created, a bunch of files can be placed in it, and those files show up in sftp.listdir. """ try: - sftp.open(FOLDER + '/duck.txt', 'w').close() - sftp.open(FOLDER + '/fish.txt', 'w').close() - sftp.open(FOLDER + '/tertiary.py', 'w').close() - - x = sftp.listdir(FOLDER) - self.assertEqual(len(x), 3) - self.assertTrue('duck.txt' in x) - self.assertTrue('fish.txt' in x) - self.assertTrue('tertiary.py' in x) - self.assertTrue('random' not in x) + sftp.open(sftp.FOLDER + "/duck.txt", "w").close() + sftp.open(sftp.FOLDER + "/fish.txt", "w").close() + sftp.open(sftp.FOLDER + "/tertiary.py", "w").close() + + x = sftp.listdir(sftp.FOLDER) + assert len(x) == 3 + assert "duck.txt" in x + assert "fish.txt" in x + assert "tertiary.py" in x + assert "random" not in x finally: - sftp.remove(FOLDER + '/duck.txt') - sftp.remove(FOLDER + '/fish.txt') - sftp.remove(FOLDER + '/tertiary.py') + sftp.remove(sftp.FOLDER + "/duck.txt") + sftp.remove(sftp.FOLDER + "/fish.txt") + sftp.remove(sftp.FOLDER + "/tertiary.py") - def test_7_5_listdir_iter(self): + def test_listdir_iter(self, sftp): """ listdir_iter version of above test """ try: - sftp.open(FOLDER + '/duck.txt', 'w').close() - sftp.open(FOLDER + '/fish.txt', 'w').close() - sftp.open(FOLDER + '/tertiary.py', 'w').close() - - x = [x.filename for x in sftp.listdir_iter(FOLDER)] - self.assertEqual(len(x), 3) - self.assertTrue('duck.txt' in x) - self.assertTrue('fish.txt' in x) - self.assertTrue('tertiary.py' in x) - self.assertTrue('random' not in x) + sftp.open(sftp.FOLDER + "/duck.txt", "w").close() + sftp.open(sftp.FOLDER + "/fish.txt", "w").close() + sftp.open(sftp.FOLDER + "/tertiary.py", "w").close() + + x = [x.filename for x in sftp.listdir_iter(sftp.FOLDER)] + assert len(x) == 3 + assert "duck.txt" in x + assert "fish.txt" in x + assert "tertiary.py" in x + assert "random" not in x + finally: + sftp.remove(sftp.FOLDER + "/duck.txt") + sftp.remove(sftp.FOLDER + "/fish.txt") + sftp.remove(sftp.FOLDER + "/tertiary.py") + + @requireNonAsciiLocale() + def test_listdir_in_locale(self, sftp): + """Test listdir under a locale that uses non-ascii text.""" + sftp.open(sftp.FOLDER + "/canard.txt", "w").close() + try: + folder_contents = sftp.listdir(sftp.FOLDER) + self.assertEqual(["canard.txt"], folder_contents) finally: - sftp.remove(FOLDER + '/duck.txt') - sftp.remove(FOLDER + '/fish.txt') - sftp.remove(FOLDER + '/tertiary.py') + sftp.remove(sftp.FOLDER + "/canard.txt") - def test_8_setstat(self): + def test_setstat(self, sftp): """ verify that the setstat functions (chown, chmod, utime, truncate) work. 
""" try: - with sftp.open(FOLDER + '/special', 'w') as f: - f.write('x' * 1024) + with sftp.open(sftp.FOLDER + "/special", "w") as f: + f.write("x" * 1024) - stat = sftp.stat(FOLDER + '/special') - sftp.chmod(FOLDER + '/special', (stat.st_mode & ~o777) | o600) - stat = sftp.stat(FOLDER + '/special') + stat = sftp.stat(sftp.FOLDER + "/special") + sftp.chmod(sftp.FOLDER + "/special", (stat.st_mode & ~o777) | o600) + stat = sftp.stat(sftp.FOLDER + "/special") expected_mode = o600 - if sys.platform == 'win32': + if sys.platform == "win32": # chmod not really functional on windows expected_mode = o666 - if sys.platform == 'cygwin': + if sys.platform == "cygwin": # even worse. expected_mode = o644 - self.assertEqual(stat.st_mode & o777, expected_mode) - self.assertEqual(stat.st_size, 1024) + assert stat.st_mode & o777 == expected_mode + assert stat.st_size == 1024 mtime = stat.st_mtime - 3600 atime = stat.st_atime - 1800 - sftp.utime(FOLDER + '/special', (atime, mtime)) - stat = sftp.stat(FOLDER + '/special') - self.assertEqual(stat.st_mtime, mtime) - if sys.platform not in ('win32', 'cygwin'): - self.assertEqual(stat.st_atime, atime) + sftp.utime(sftp.FOLDER + "/special", (atime, mtime)) + stat = sftp.stat(sftp.FOLDER + "/special") + assert stat.st_mtime == mtime + if sys.platform not in ("win32", "cygwin"): + assert stat.st_atime == atime # can't really test chown, since we'd have to know a valid uid. - sftp.truncate(FOLDER + '/special', 512) - stat = sftp.stat(FOLDER + '/special') - self.assertEqual(stat.st_size, 512) + sftp.truncate(sftp.FOLDER + "/special", 512) + stat = sftp.stat(sftp.FOLDER + "/special") + assert stat.st_size == 512 finally: - sftp.remove(FOLDER + '/special') + sftp.remove(sftp.FOLDER + "/special") - def test_9_fsetstat(self): + def test_fsetstat(self, sftp): """ verify that the fsetstat functions (chown, chmod, utime, truncate) work on open files. """ try: - with sftp.open(FOLDER + '/special', 'w') as f: - f.write('x' * 1024) + with sftp.open(sftp.FOLDER + "/special", "w") as f: + f.write("x" * 1024) - with sftp.open(FOLDER + '/special', 'r+') as f: + with sftp.open(sftp.FOLDER + "/special", "r+") as f: stat = f.stat() f.chmod((stat.st_mode & ~o777) | o600) stat = f.stat() expected_mode = o600 - if sys.platform == 'win32': + if sys.platform == "win32": # chmod not really functional on windows expected_mode = o666 - if sys.platform == 'cygwin': + if sys.platform == "cygwin": # even worse. expected_mode = o644 - self.assertEqual(stat.st_mode & o777, expected_mode) - self.assertEqual(stat.st_size, 1024) + assert stat.st_mode & o777 == expected_mode + assert stat.st_size == 1024 mtime = stat.st_mtime - 3600 atime = stat.st_atime - 1800 f.utime((atime, mtime)) stat = f.stat() - self.assertEqual(stat.st_mtime, mtime) - if sys.platform not in ('win32', 'cygwin'): - self.assertEqual(stat.st_atime, atime) + assert stat.st_mtime == mtime + if sys.platform not in ("win32", "cygwin"): + assert stat.st_atime == atime # can't really test chown, since we'd have to know a valid uid. f.truncate(512) stat = f.stat() - self.assertEqual(stat.st_size, 512) + assert stat.st_size == 512 finally: - sftp.remove(FOLDER + '/special') + sftp.remove(sftp.FOLDER + "/special") - def test_A_readline_seek(self): + def test_readline_seek(self, sftp): """ - create a text file and write a bunch of text into it. then count the lines - in the file, and seek around to retreive particular lines. 
this should - verify that read buffering and 'tell' work well together, and that read - buffering is reset on 'seek'. + create a text file and write a bunch of text into it. then count the + lines in the file, and seek around to retrieve particular lines. this + should verify that read buffering and 'tell' work well together, and + that read buffering is reset on 'seek'. """ try: - with sftp.open(FOLDER + '/duck.txt', 'w') as f: + with sftp.open(sftp.FOLDER + "/duck.txt", "w") as f: f.write(ARTICLE) - with sftp.open(FOLDER + '/duck.txt', 'r+') as f: + with sftp.open(sftp.FOLDER + "/duck.txt", "r+") as f: line_number = 0 loc = 0 pos_list = [] @@ -429,35 +377,35 @@ def test_A_readline_seek(self): line_number += 1 pos_list.append(loc) loc = f.tell() - self.assertTrue(f.seekable()) + assert f.seekable() f.seek(pos_list[6], f.SEEK_SET) - self.assertEqual(f.readline(), 'Nouzilly, France.\n') + assert f.readline(), "Nouzilly == France.\n" f.seek(pos_list[17], f.SEEK_SET) - self.assertEqual(f.readline()[:4], 'duck') + assert f.readline()[:4] == "duck" f.seek(pos_list[10], f.SEEK_SET) - self.assertEqual(f.readline(), 'duck types were equally resistant to exogenous insulin compared with chicken.\n') + expected = "duck types were equally resistant to exogenous insulin compared with chicken.\n" # noqa + assert f.readline() == expected finally: - sftp.remove(FOLDER + '/duck.txt') + sftp.remove(sftp.FOLDER + "/duck.txt") - def test_B_write_seek(self): + def test_write_seek(self, sftp): """ - create a text file, seek back and change part of it, and verify that the - changes worked. + Create a text file, seek back, change it, and verify. """ try: - with sftp.open(FOLDER + '/testing.txt', 'w') as f: - f.write('hello kitty.\n') + with sftp.open(sftp.FOLDER + "/testing.txt", "w") as f: + f.write("hello kitty.\n") f.seek(-5, f.SEEK_CUR) - f.write('dd') + f.write("dd") - self.assertEqual(sftp.stat(FOLDER + '/testing.txt').st_size, 13) - with sftp.open(FOLDER + '/testing.txt', 'r') as f: + assert sftp.stat(sftp.FOLDER + "/testing.txt").st_size == 13 + with sftp.open(sftp.FOLDER + "/testing.txt", "r") as f: data = f.read(20) - self.assertEqual(data, b'hello kiddy.\n') + assert data == b"hello kiddy.\n" finally: - sftp.remove(FOLDER + '/testing.txt') + sftp.remove(sftp.FOLDER + "/testing.txt") - def test_C_symlink(self): + def test_symlink(self, sftp): """ create a symlink and then check that lstat doesn't follow it. 
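The symlink semantics asserted in this test come down to lstat() reporting the link itself while stat() follows it to the target. A sketch against a connected `sftp` client:

with sftp.open("original.txt", "w") as f:
    f.write("original\n")
sftp.symlink("original.txt", "link.txt")

assert sftp.readlink("link.txt") == "original.txt"
link_size = sftp.lstat("link.txt").st_size    # size of the link itself (path length)
target_size = sftp.stat("link.txt").st_size   # size of original.txt (link followed)
assert target_size == 9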
""" @@ -466,361 +414,423 @@ def test_C_symlink(self): return try: - with sftp.open(FOLDER + '/original.txt', 'w') as f: - f.write('original\n') - sftp.symlink('original.txt', FOLDER + '/link.txt') - self.assertEqual(sftp.readlink(FOLDER + '/link.txt'), 'original.txt') + with sftp.open(sftp.FOLDER + "/original.txt", "w") as f: + f.write("original\n") + sftp.symlink("original.txt", sftp.FOLDER + "/link.txt") + assert sftp.readlink(sftp.FOLDER + "/link.txt") == "original.txt" - with sftp.open(FOLDER + '/link.txt', 'r') as f: - self.assertEqual(f.readlines(), ['original\n']) + with sftp.open(sftp.FOLDER + "/link.txt", "r") as f: + assert f.readlines() == ["original\n"] - cwd = sftp.normalize('.') - if cwd[-1] == '/': + cwd = sftp.normalize(".") + if cwd[-1] == "/": cwd = cwd[:-1] - abs_path = cwd + '/' + FOLDER + '/original.txt' - sftp.symlink(abs_path, FOLDER + '/link2.txt') - self.assertEqual(abs_path, sftp.readlink(FOLDER + '/link2.txt')) + abs_path = cwd + "/" + sftp.FOLDER + "/original.txt" + sftp.symlink(abs_path, sftp.FOLDER + "/link2.txt") + assert abs_path == sftp.readlink(sftp.FOLDER + "/link2.txt") - self.assertEqual(sftp.lstat(FOLDER + '/link.txt').st_size, 12) - self.assertEqual(sftp.stat(FOLDER + '/link.txt').st_size, 9) + assert sftp.lstat(sftp.FOLDER + "/link.txt").st_size == 12 + assert sftp.stat(sftp.FOLDER + "/link.txt").st_size == 9 # the sftp server may be hiding extra path members from us, so the # length may be longer than we expect: - self.assertTrue(sftp.lstat(FOLDER + '/link2.txt').st_size >= len(abs_path)) - self.assertEqual(sftp.stat(FOLDER + '/link2.txt').st_size, 9) - self.assertEqual(sftp.stat(FOLDER + '/original.txt').st_size, 9) + assert sftp.lstat(sftp.FOLDER + "/link2.txt").st_size >= len( + abs_path + ) + assert sftp.stat(sftp.FOLDER + "/link2.txt").st_size == 9 + assert sftp.stat(sftp.FOLDER + "/original.txt").st_size == 9 finally: try: - sftp.remove(FOLDER + '/link.txt') + sftp.remove(sftp.FOLDER + "/link.txt") except: pass try: - sftp.remove(FOLDER + '/link2.txt') + sftp.remove(sftp.FOLDER + "/link2.txt") except: pass try: - sftp.remove(FOLDER + '/original.txt') + sftp.remove(sftp.FOLDER + "/original.txt") except: pass - def test_D_flush_seek(self): + def test_flush_seek(self, sftp): """ verify that buffered writes are automatically flushed on seek. """ try: - with sftp.open(FOLDER + '/happy.txt', 'w', 1) as f: - f.write('full line.\n') - f.write('partial') + with sftp.open(sftp.FOLDER + "/happy.txt", "w", 1) as f: + f.write("full line.\n") + f.write("partial") f.seek(9, f.SEEK_SET) - f.write('?\n') + f.write("?\n") - with sftp.open(FOLDER + '/happy.txt', 'r') as f: - self.assertEqual(f.readline(), u('full line?\n')) - self.assertEqual(f.read(7), b'partial') + with sftp.open(sftp.FOLDER + "/happy.txt", "r") as f: + assert f.readline() == u("full line?\n") + assert f.read(7) == b"partial" finally: try: - sftp.remove(FOLDER + '/happy.txt') + sftp.remove(sftp.FOLDER + "/happy.txt") except: pass - def test_E_realpath(self): + def test_realpath(self, sftp): """ test that realpath is returning something non-empty and not an error. """ - pwd = sftp.normalize('.') - self.assertTrue(len(pwd) > 0) - f = sftp.normalize('./' + FOLDER) - self.assertTrue(len(f) > 0) - self.assertEqual(os.path.join(pwd, FOLDER), f) + pwd = sftp.normalize(".") + assert len(pwd) > 0 + f = sftp.normalize("./" + sftp.FOLDER) + assert len(f) > 0 + assert os.path.join(pwd, sftp.FOLDER) == f - def test_F_mkdir(self): + def test_mkdir(self, sftp): """ verify that mkdir/rmdir work. 
""" - try: - sftp.mkdir(FOLDER + '/subfolder') - except: - self.assertTrue(False, 'exception creating subfolder') - try: - sftp.mkdir(FOLDER + '/subfolder') - self.assertTrue(False, 'no exception overwriting subfolder') - except IOError: - pass - try: - sftp.rmdir(FOLDER + '/subfolder') - except: - self.assertTrue(False, 'exception removing subfolder') - try: - sftp.rmdir(FOLDER + '/subfolder') - self.assertTrue(False, 'no exception removing nonexistent subfolder') - except IOError: - pass + sftp.mkdir(sftp.FOLDER + "/subfolder") + with pytest.raises(IOError): # generic msg only + sftp.mkdir(sftp.FOLDER + "/subfolder") + sftp.rmdir(sftp.FOLDER + "/subfolder") + with pytest.raises(IOError, match="No such file"): + sftp.rmdir(sftp.FOLDER + "/subfolder") - def test_G_chdir(self): + def test_chdir(self, sftp): """ verify that chdir/getcwd work. """ - root = sftp.normalize('.') - if root[-1] != '/': - root += '/' - try: - sftp.mkdir(FOLDER + '/alpha') - sftp.chdir(FOLDER + '/alpha') - sftp.mkdir('beta') - self.assertEqual(root + FOLDER + '/alpha', sftp.getcwd()) - self.assertEqual(['beta'], sftp.listdir('.')) - - sftp.chdir('beta') - with sftp.open('fish', 'w') as f: - f.write('hello\n') - sftp.chdir('..') - self.assertEqual(['fish'], sftp.listdir('beta')) - sftp.chdir('..') - self.assertEqual(['fish'], sftp.listdir('alpha/beta')) + root = sftp.normalize(".") + if root[-1] != "/": + root += "/" + try: + sftp.mkdir(sftp.FOLDER + "/alpha") + sftp.chdir(sftp.FOLDER + "/alpha") + sftp.mkdir("beta") + assert root + sftp.FOLDER + "/alpha" == sftp.getcwd() + assert ["beta"] == sftp.listdir(".") + + sftp.chdir("beta") + with sftp.open("fish", "w") as f: + f.write("hello\n") + sftp.chdir("..") + assert ["fish"] == sftp.listdir("beta") + sftp.chdir("..") + assert ["fish"] == sftp.listdir("alpha/beta") finally: sftp.chdir(root) try: - sftp.unlink(FOLDER + '/alpha/beta/fish') + sftp.unlink(sftp.FOLDER + "/alpha/beta/fish") except: pass try: - sftp.rmdir(FOLDER + '/alpha/beta') + sftp.rmdir(sftp.FOLDER + "/alpha/beta") except: pass try: - sftp.rmdir(FOLDER + '/alpha') + sftp.rmdir(sftp.FOLDER + "/alpha") except: pass - def test_H_get_put(self): + def test_get_put(self, sftp): """ verify that get/put work. 
""" - warnings.filterwarnings('ignore', 'tempnam.*') + warnings.filterwarnings("ignore", "tempnam.*") fd, localname = mkstemp() os.close(fd) - text = b'All I wanted was a plastic bunny rabbit.\n' - with open(localname, 'wb') as f: + text = b"All I wanted was a plastic bunny rabbit.\n" + with open(localname, "wb") as f: f.write(text) saved_progress = [] def progress_callback(x, y): saved_progress.append((x, y)) - sftp.put(localname, FOLDER + '/bunny.txt', progress_callback) - with sftp.open(FOLDER + '/bunny.txt', 'rb') as f: - self.assertEqual(text, f.read(128)) - self.assertEqual([(41, 41)], saved_progress) + sftp.put(localname, sftp.FOLDER + "/bunny.txt", progress_callback) + + with sftp.open(sftp.FOLDER + "/bunny.txt", "rb") as f: + assert text == f.read(128) + assert [(41, 41)] == saved_progress os.unlink(localname) fd, localname = mkstemp() os.close(fd) saved_progress = [] - sftp.get(FOLDER + '/bunny.txt', localname, progress_callback) + sftp.get(sftp.FOLDER + "/bunny.txt", localname, progress_callback) - with open(localname, 'rb') as f: - self.assertEqual(text, f.read(128)) - self.assertEqual([(41, 41)], saved_progress) + with open(localname, "rb") as f: + assert text == f.read(128) + assert [(41, 41)] == saved_progress os.unlink(localname) - sftp.unlink(FOLDER + '/bunny.txt') + sftp.unlink(sftp.FOLDER + "/bunny.txt") - def test_I_check(self): + def test_get_without_prefetch(self, sftp): + """ + Create a 4MB file. Verify that pull works without prefetching + using a lager file. + """ + + sftp_filename = sftp.FOLDER + "/dummy_file" + num_chars = 1024 * 1024 * 4 + + fd, localname = mkstemp() + os.close(fd) + + with open(localname, "wb") as f: + f.write(b"0" * num_chars) + + sftp.put(localname, sftp_filename) + + os.unlink(localname) + fd, localname = mkstemp() + os.close(fd) + + sftp.get(sftp_filename, localname, prefetch=False) + + assert os.stat(localname).st_size == num_chars + + os.unlink(localname) + sftp.unlink(sftp_filename) + + def test_check(self, sftp): """ verify that file.check() works against our own server. (it's an sftp extension that we support, and may be the only ones who support it.) """ - with sftp.open(FOLDER + '/kitty.txt', 'w') as f: - f.write('here kitty kitty' * 64) - - try: - with sftp.open(FOLDER + '/kitty.txt', 'r') as f: - sum = f.check('sha1') - self.assertEqual('91059CFC6615941378D413CB5ADAF4C5EB293402', u(hexlify(sum)).upper()) - sum = f.check('md5', 0, 512) - self.assertEqual('93DE4788FCA28D471516963A1FE3856A', u(hexlify(sum)).upper()) - sum = f.check('md5', 0, 0, 510) - self.assertEqual('EB3B45B8CD55A0707D99B177544A319F373183D241432BB2157AB9E46358C4AC90370B5CADE5D90336FC1716F90B36D6', - u(hexlify(sum)).upper()) + with sftp.open(sftp.FOLDER + "/kitty.txt", "w") as f: + f.write("here kitty kitty" * 64) + + try: + with sftp.open(sftp.FOLDER + "/kitty.txt", "r") as f: + sum = f.check("sha1") + assert ( + "91059CFC6615941378D413CB5ADAF4C5EB293402" + == u(hexlify(sum)).upper() + ) + sum = f.check("md5", 0, 512) + assert ( + "93DE4788FCA28D471516963A1FE3856A" + == u(hexlify(sum)).upper() + ) + sum = f.check("md5", 0, 0, 510) + expected = "EB3B45B8CD55A0707D99B177544A319F373183D241432BB2157AB9E46358C4AC90370B5CADE5D90336FC1716F90B36D6" # noqa + assert u(hexlify(sum)).upper() == expected finally: - sftp.unlink(FOLDER + '/kitty.txt') + sftp.unlink(sftp.FOLDER + "/kitty.txt") - def test_J_x_flag(self): + def test_x_flag(self, sftp): """ verify that the 'x' flag works when opening a file. 
""" - sftp.open(FOLDER + '/unusual.txt', 'wx').close() + sftp.open(sftp.FOLDER + "/unusual.txt", "wx").close() try: try: - sftp.open(FOLDER + '/unusual.txt', 'wx') - self.fail('expected exception') + sftp.open(sftp.FOLDER + "/unusual.txt", "wx") + self.fail("expected exception") except IOError: pass finally: - sftp.unlink(FOLDER + '/unusual.txt') + sftp.unlink(sftp.FOLDER + "/unusual.txt") - def test_K_utf8(self): + def test_utf8(self, sftp): """ verify that unicode strings are encoded into utf8 correctly. """ - with sftp.open(FOLDER + '/something', 'w') as f: - f.write('okay') + with sftp.open(sftp.FOLDER + "/something", "w") as f: + f.write("okay") try: - sftp.rename(FOLDER + '/something', FOLDER + '/' + unicode_folder) - sftp.open(b(FOLDER) + utf8_folder, 'r') + sftp.rename( + sftp.FOLDER + "/something", sftp.FOLDER + "/" + unicode_folder + ) + sftp.open(b(sftp.FOLDER) + utf8_folder, "r") except Exception as e: - self.fail('exception ' + str(e)) - sftp.unlink(b(FOLDER) + utf8_folder) + self.fail("exception " + str(e)) + sftp.unlink(b(sftp.FOLDER) + utf8_folder) - def test_L_utf8_chdir(self): - sftp.mkdir(FOLDER + '/' + unicode_folder) + def test_utf8_chdir(self, sftp): + sftp.mkdir(sftp.FOLDER + "/" + unicode_folder) try: - sftp.chdir(FOLDER + '/' + unicode_folder) - with sftp.open('something', 'w') as f: - f.write('okay') - sftp.unlink('something') + sftp.chdir(sftp.FOLDER + "/" + unicode_folder) + with sftp.open("something", "w") as f: + f.write("okay") + sftp.unlink("something") finally: sftp.chdir() - sftp.rmdir(FOLDER + '/' + unicode_folder) + sftp.rmdir(sftp.FOLDER + "/" + unicode_folder) - def test_M_bad_readv(self): + def test_bad_readv(self, sftp): """ verify that readv at the end of the file doesn't essplode. """ - sftp.open(FOLDER + '/zero', 'w').close() + sftp.open(sftp.FOLDER + "/zero", "w").close() try: - with sftp.open(FOLDER + '/zero', 'r') as f: + with sftp.open(sftp.FOLDER + "/zero", "r") as f: f.readv([(0, 12)]) - with sftp.open(FOLDER + '/zero', 'r') as f: + with sftp.open(sftp.FOLDER + "/zero", "r") as f: file_size = f.stat().st_size f.prefetch(file_size) f.read(100) finally: - sftp.unlink(FOLDER + '/zero') + sftp.unlink(sftp.FOLDER + "/zero") - def test_N_put_without_confirm(self): + def test_put_without_confirm(self, sftp): """ verify that get/put work without confirmation. """ - warnings.filterwarnings('ignore', 'tempnam.*') + warnings.filterwarnings("ignore", "tempnam.*") fd, localname = mkstemp() os.close(fd) - text = b'All I wanted was a plastic bunny rabbit.\n' - with open(localname, 'wb') as f: + text = b"All I wanted was a plastic bunny rabbit.\n" + with open(localname, "wb") as f: f.write(text) saved_progress = [] def progress_callback(x, y): saved_progress.append((x, y)) - res = sftp.put(localname, FOLDER + '/bunny.txt', progress_callback, False) - self.assertEqual(SFTPAttributes().attr, res.attr) + res = sftp.put( + localname, sftp.FOLDER + "/bunny.txt", progress_callback, False + ) - with sftp.open(FOLDER + '/bunny.txt', 'r') as f: - self.assertEqual(text, f.read(128)) - self.assertEqual((41, 41), saved_progress[-1]) + assert SFTPAttributes().attr == res.attr + + with sftp.open(sftp.FOLDER + "/bunny.txt", "r") as f: + assert text == f.read(128) + assert (41, 41) == saved_progress[-1] os.unlink(localname) - sftp.unlink(FOLDER + '/bunny.txt') + sftp.unlink(sftp.FOLDER + "/bunny.txt") - def test_O_getcwd(self): + def test_getcwd(self, sftp): """ verify that chdir/getcwd work. 
""" - self.assertEqual(None, sftp.getcwd()) - root = sftp.normalize('.') - if root[-1] != '/': - root += '/' + assert sftp.getcwd() is None + root = sftp.normalize(".") + if root[-1] != "/": + root += "/" try: - sftp.mkdir(FOLDER + '/alpha') - sftp.chdir(FOLDER + '/alpha') - self.assertEqual('/' + FOLDER + '/alpha', sftp.getcwd()) + sftp.mkdir(sftp.FOLDER + "/alpha") + sftp.chdir(sftp.FOLDER + "/alpha") + assert sftp.getcwd() == "/" + sftp.FOLDER + "/alpha" finally: sftp.chdir(root) try: - sftp.rmdir(FOLDER + '/alpha') + sftp.rmdir(sftp.FOLDER + "/alpha") except: pass - def XXX_test_M_seek_append(self): + def test_seek_append(self, sftp): """ verify that seek does't affect writes during append. does not work except through paramiko. :( openssh fails. """ try: - with sftp.open(FOLDER + '/append.txt', 'a') as f: - f.write('first line\nsecond line\n') + with sftp.open(sftp.FOLDER + "/append.txt", "a") as f: + f.write("first line\nsecond line\n") f.seek(11, f.SEEK_SET) - f.write('third line\n') + f.write("third line\n") - with sftp.open(FOLDER + '/append.txt', 'r') as f: - self.assertEqual(f.stat().st_size, 34) - self.assertEqual(f.readline(), 'first line\n') - self.assertEqual(f.readline(), 'second line\n') - self.assertEqual(f.readline(), 'third line\n') + with sftp.open(sftp.FOLDER + "/append.txt", "r") as f: + assert f.stat().st_size == 34 + assert f.readline() == "first line\n" + assert f.readline() == "second line\n" + assert f.readline() == "third line\n" finally: - sftp.remove(FOLDER + '/append.txt') + sftp.remove(sftp.FOLDER + "/append.txt") - def test_putfo_empty_file(self): + def test_putfo_empty_file(self, sftp): """ Send an empty file and confirm it is sent. """ - target = FOLDER + '/empty file.txt' + target = sftp.FOLDER + "/empty file.txt" stream = StringIO() try: attrs = sftp.putfo(stream, target) # the returned attributes should not be null - self.assertNotEqual(attrs, None) + assert attrs is not None finally: sftp.remove(target) - - def test_N_file_with_percent(self): + # TODO: this test doesn't actually fail if the regression (removing '%' + # expansion to '%%' within sftp.py's def _log()) is removed - stacktraces + # appear but they're clearly emitted from subthreads that have no error + # handling. No point running it until that is fixed somehow. + @pytest.mark.skip("Doesn't prove anything right now") + def test_file_with_percent(self, sftp): """ verify that we can create a file with a '%' in the filename. 
( it needs to be properly escaped by _log() ) """ - self.assertTrue( paramiko.util.get_logger("paramiko").handlers, "This unit test requires logging to be enabled" ) - f = sftp.open(FOLDER + '/test%file', 'w') + f = sftp.open(sftp.FOLDER + "/test%file", "w") try: - self.assertEqual(f.stat().st_size, 0) + assert f.stat().st_size == 0 finally: f.close() - sftp.remove(FOLDER + '/test%file') + sftp.remove(sftp.FOLDER + "/test%file") - - def test_O_non_utf8_data(self): + def test_non_utf8_data(self, sftp): """Test write() and read() of non utf8 data""" try: - with sftp.open('%s/nonutf8data' % FOLDER, 'w') as f: + with sftp.open("%s/nonutf8data" % sftp.FOLDER, "w") as f: f.write(NON_UTF8_DATA) - with sftp.open('%s/nonutf8data' % FOLDER, 'r') as f: + with sftp.open("%s/nonutf8data" % sftp.FOLDER, "r") as f: data = f.read() - self.assertEqual(data, NON_UTF8_DATA) - with sftp.open('%s/nonutf8data' % FOLDER, 'wb') as f: + assert data == NON_UTF8_DATA + with sftp.open("%s/nonutf8data" % sftp.FOLDER, "wb") as f: f.write(NON_UTF8_DATA) - with sftp.open('%s/nonutf8data' % FOLDER, 'rb') as f: + with sftp.open("%s/nonutf8data" % sftp.FOLDER, "rb") as f: data = f.read() - self.assertEqual(data, NON_UTF8_DATA) + assert data == NON_UTF8_DATA finally: - sftp.remove('%s/nonutf8data' % FOLDER) + sftp.remove("%s/nonutf8data" % sftp.FOLDER) + @requireNonAsciiLocale("LC_TIME") + def test_sftp_attributes_locale_time(self, sftp): + """Test SFTPAttributes under a locale with non-ascii time strings.""" + some_stat = os.stat(sftp.FOLDER) + sftp_attributes = SFTPAttributes.from_stat(some_stat, u("a_directory")) + self.assertTrue(b"a_directory" in sftp_attributes.asbytes()) - def test_sftp_attributes_empty_str(self): + def test_sftp_attributes_empty_str(self, sftp): sftp_attributes = SFTPAttributes() - self.assertEqual(str(sftp_attributes), "?--------- 1 0 0 0 (unknown date) ?") + assert ( + str(sftp_attributes) + == "?--------- 1 0 0 0 (unknown date) ?" + ) + + @needs_builtin("buffer") + def test_write_buffer(self, sftp): + """Test write() using a buffer instance.""" + data = 3 * b"A potentially large block of data to chunk up.\n" + try: + with sftp.open("%s/write_buffer" % sftp.FOLDER, "wb") as f: + for offset in range(0, len(data), 8): + f.write(buffer(data, offset, 8)) # noqa + + with sftp.open("%s/write_buffer" % sftp.FOLDER, "rb") as f: + assert f.read() == data + finally: + sftp.remove("%s/write_buffer" % sftp.FOLDER) + @needs_builtin("memoryview") + def test_write_memoryview(self, sftp): + """Test write() using a memoryview instance.""" + data = 3 * b"A potentially large block of data to chunk up.\n" + try: + with sftp.open("%s/write_memoryview" % sftp.FOLDER, "wb") as f: + view = memoryview(data) + for offset in range(0, len(data), 8): + f.write(view[offset : offset + 8]) -if __name__ == '__main__': - SFTPTest.init_loopback() - # logging is required by test_N_file_with_percent - paramiko.util.log_to_file('test_sftp.log') - from unittest import main - main() + with sftp.open("%s/write_memoryview" % sftp.FOLDER, "rb") as f: + assert f.read() == data + finally: + sftp.remove("%s/write_memoryview" % sftp.FOLDER) diff --git a/tests/test_sftp_big.py b/tests/test_sftp_big.py index cfad5682b..fc556faf4 100644 --- a/tests/test_sftp_big.py +++ b/tests/test_sftp_big.py @@ -23,119 +23,105 @@ do test file operations in (so no existing files will be harmed). 
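The buffer/memoryview tests above chunk a write through slicing, and the non-utf8 test shows that binary mode round-trips arbitrary bytes. The same pattern outside the harness, against a connected `sftp` client:

data = b"\xc3\xc3" + b"arbitrary non-utf8 payload\n" * 3
with sftp.open("blob.bin", "wb") as f:
    view = memoryview(data)
    for offset in range(0, len(data), 8):
        f.write(view[offset:offset + 8])
with sftp.open("blob.bin", "rb") as f:
    assert f.read() == data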
""" -import os import random import struct import sys import time -import unittest from paramiko.common import o660 -from tests.test_sftp import get_sftp -FOLDER = os.environ.get('TEST_FOLDER', 'temp-testing000') +from .util import slow -class BigSFTPTest (unittest.TestCase): - - def setUp(self): - global FOLDER - sftp = get_sftp() - for i in range(1000): - FOLDER = FOLDER[:-3] + '%03d' % i - try: - sftp.mkdir(FOLDER) - break - except (IOError, OSError): - pass - - def tearDown(self): - sftp = get_sftp() - sftp.rmdir(FOLDER) - - def test_1_lots_of_files(self): +@slow +class TestBigSFTP(object): + def test_lots_of_files(self, sftp): """ create a bunch of files over the same session. """ - sftp = get_sftp() numfiles = 100 try: for i in range(numfiles): - with sftp.open('%s/file%d.txt' % (FOLDER, i), 'w', 1) as f: - f.write('this is file #%d.\n' % i) - sftp.chmod('%s/file%d.txt' % (FOLDER, i), o660) + with sftp.open( + "%s/file%d.txt" % (sftp.FOLDER, i), "w", 1 + ) as f: + f.write("this is file #%d.\n" % i) + sftp.chmod("%s/file%d.txt" % (sftp.FOLDER, i), o660) # now make sure every file is there, by creating a list of filenmes # and reading them in random order. numlist = list(range(numfiles)) while len(numlist) > 0: r = numlist[random.randint(0, len(numlist) - 1)] - with sftp.open('%s/file%d.txt' % (FOLDER, r)) as f: - self.assertEqual(f.readline(), 'this is file #%d.\n' % r) + with sftp.open("%s/file%d.txt" % (sftp.FOLDER, r)) as f: + assert f.readline() == "this is file #%d.\n" % r numlist.remove(r) finally: for i in range(numfiles): try: - sftp.remove('%s/file%d.txt' % (FOLDER, i)) + sftp.remove("%s/file%d.txt" % (sftp.FOLDER, i)) except: pass - def test_2_big_file(self): + def test_big_file(self, sftp): """ write a 1MB file with no buffering. """ - sftp = get_sftp() - kblob = (1024 * b'x') + kblob = 1024 * b"x" start = time.time() try: - with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "w") as f: for n in range(1024): f.write(kblob) if n % 128 == 0: - sys.stderr.write('.') - sys.stderr.write(' ') + sys.stderr.write(".") + sys.stderr.write(" ") - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) end = time.time() - sys.stderr.write('%ds ' % round(end - start)) - + sys.stderr.write("%ds " % round(end - start)) + start = time.time() - with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f: for n in range(1024): data = f.read(1024) - self.assertEqual(data, kblob) + assert data == kblob end = time.time() - sys.stderr.write('%ds ' % round(end - start)) + sys.stderr.write("%ds " % round(end - start)) finally: - sftp.remove('%s/hongry.txt' % FOLDER) + sftp.remove("%s/hongry.txt" % sftp.FOLDER) - def test_3_big_file_pipelined(self): + def test_big_file_pipelined(self, sftp): """ write a 1MB file, with no linefeeds, using pipelining. 
""" - sftp = get_sftp() - kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) + kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) start = time.time() try: - with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: - sys.stderr.write('.') - sys.stderr.write(' ') + sys.stderr.write(".") + sys.stderr.write(" ") - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) end = time.time() - sys.stderr.write('%ds ' % round(end - start)) - + sys.stderr.write("%ds " % round(end - start)) + start = time.time() - with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f: file_size = f.stat().st_size f.prefetch(file_size) - # read on odd boundaries to make sure the bytes aren't getting scrambled + # read on odd boundaries to make sure the bytes aren't getting + # scrambled n = 0 k2blob = kblob + kblob chunk = 629 @@ -145,36 +131,39 @@ def test_3_big_file_pipelined(self): chunk = size - n data = f.read(chunk) offset = n % 1024 - self.assertEqual(data, k2blob[offset:offset + chunk]) + assert data == k2blob[offset : offset + chunk] n += chunk end = time.time() - sys.stderr.write('%ds ' % round(end - start)) + sys.stderr.write("%ds " % round(end - start)) finally: - sftp.remove('%s/hongry.txt' % FOLDER) + sftp.remove("%s/hongry.txt" % sftp.FOLDER) - def test_4_prefetch_seek(self): - sftp = get_sftp() - kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) + def test_prefetch_seek(self, sftp): + kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) try: - with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: - sys.stderr.write('.') - sys.stderr.write(' ') - - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) - + sys.stderr.write(".") + sys.stderr.write(" ") + + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) + start = time.time() k2blob = kblob + kblob chunk = 793 for i in range(10): - with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f: file_size = f.stat().st_size f.prefetch(file_size) - base_offset = (512 * 1024) + 17 * random.randint(1000, 2000) + base_offset = (512 * 1024) + 17 * random.randint( + 1000, 2000 + ) offsets = [base_offset + j * chunk for j in range(100)] # randomly seek around and read them out for j in range(100): @@ -183,33 +172,36 @@ def test_4_prefetch_seek(self): f.seek(offset) data = f.read(chunk) n_offset = offset % 1024 - self.assertEqual(data, k2blob[n_offset:n_offset + chunk]) + assert data == k2blob[n_offset : n_offset + chunk] offset += chunk end = time.time() - sys.stderr.write('%ds ' % round(end - start)) + sys.stderr.write("%ds " % round(end - start)) finally: - sftp.remove('%s/hongry.txt' % FOLDER) + sftp.remove("%s/hongry.txt" % sftp.FOLDER) - def test_5_readv_seek(self): - sftp = get_sftp() - kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) + def test_readv_seek(self, sftp): + kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) try: - with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f: f.set_pipelined(True) 
for n in range(1024): f.write(kblob) if n % 128 == 0: - sys.stderr.write('.') - sys.stderr.write(' ') + sys.stderr.write(".") + sys.stderr.write(" ") - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) start = time.time() k2blob = kblob + kblob chunk = 793 for i in range(10): - with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: - base_offset = (512 * 1024) + 17 * random.randint(1000, 2000) + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f: + base_offset = (512 * 1024) + 17 * random.randint( + 1000, 2000 + ) # make a bunch of offsets and put them in random order offsets = [base_offset + j * chunk for j in range(100)] readv_list = [] @@ -221,155 +213,162 @@ def test_5_readv_seek(self): for i in range(len(readv_list)): offset = readv_list[i][0] n_offset = offset % 1024 - self.assertEqual(next(ret), k2blob[n_offset:n_offset + chunk]) + assert next(ret) == k2blob[n_offset : n_offset + chunk] end = time.time() - sys.stderr.write('%ds ' % round(end - start)) + sys.stderr.write("%ds " % round(end - start)) finally: - sftp.remove('%s/hongry.txt' % FOLDER) + sftp.remove("%s/hongry.txt" % sftp.FOLDER) - def test_6_lots_of_prefetching(self): + def test_lots_of_prefetching(self, sftp): """ prefetch a 1MB file a bunch of times, discarding the file object without using it, to verify that paramiko doesn't get confused. """ - sftp = get_sftp() - kblob = (1024 * b'x') + kblob = 1024 * b"x" try: - with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "w") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: - sys.stderr.write('.') - sys.stderr.write(' ') + sys.stderr.write(".") + sys.stderr.write(" ") - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) for i in range(10): - with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f: file_size = f.stat().st_size f.prefetch(file_size) - with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f: file_size = f.stat().st_size f.prefetch(file_size) for n in range(1024): data = f.read(1024) - self.assertEqual(data, kblob) + assert data == kblob if n % 128 == 0: - sys.stderr.write('.') - sys.stderr.write(' ') + sys.stderr.write(".") + sys.stderr.write(" ") finally: - sftp.remove('%s/hongry.txt' % FOLDER) - - def test_7_prefetch_readv(self): + sftp.remove("%s/hongry.txt" % sftp.FOLDER) + + def test_prefetch_readv(self, sftp): """ verify that prefetch and readv don't conflict with each other. 
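prefetch() and readv() as used throughout these benchmarks. A sketch assuming a connected `sftp` client and an existing remote file of at least 1MB:

with sftp.open("hongry.txt", "rb") as f:
    size = f.stat().st_size
    f.prefetch(size)                   # pipeline read-ahead over the whole file
    head = f.read(1024)                # served from the prefetch buffers
    # readv() takes (offset, length) pairs and yields the chunks in order
    chunks = [(512 * 1024 + i * 793, 793) for i in range(20)]
    for piece in f.readv(chunks):
        assert len(piece) == 793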
""" - sftp = get_sftp() - kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) + kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) try: - with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: - sys.stderr.write('.') - sys.stderr.write(' ') - - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) + sys.stderr.write(".") + sys.stderr.write(" ") + + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) - with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f: file_size = f.stat().st_size f.prefetch(file_size) data = f.read(1024) - self.assertEqual(data, kblob) + assert data == kblob chunk_size = 793 base_offset = 512 * 1024 k2blob = kblob + kblob - chunks = [(base_offset + (chunk_size * i), chunk_size) for i in range(20)] + chunks = [ + (base_offset + (chunk_size * i), chunk_size) + for i in range(20) + ] for data in f.readv(chunks): offset = base_offset % 1024 - self.assertEqual(chunk_size, len(data)) - self.assertEqual(k2blob[offset:offset + chunk_size], data) + assert chunk_size == len(data) + assert k2blob[offset : offset + chunk_size] == data base_offset += chunk_size - sys.stderr.write(' ') + sys.stderr.write(" ") finally: - sftp.remove('%s/hongry.txt' % FOLDER) - - def test_8_large_readv(self): + sftp.remove("%s/hongry.txt" % sftp.FOLDER) + + def test_large_readv(self, sftp): """ verify that a very large readv is broken up correctly and still returned as a single blob. """ - sftp = get_sftp() - kblob = bytes().join([struct.pack('>H', n) for n in range(512)]) + kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) try: - with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: - sys.stderr.write('.') - sys.stderr.write(' ') + sys.stderr.write(".") + sys.stderr.write(" ") + + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) - - with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f: + with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f: data = list(f.readv([(23 * 1024, 128 * 1024)])) - self.assertEqual(1, len(data)) + assert len(data) == 1 data = data[0] - self.assertEqual(128 * 1024, len(data)) - - sys.stderr.write(' ') + assert len(data) == 128 * 1024 + + sys.stderr.write(" ") finally: - sftp.remove('%s/hongry.txt' % FOLDER) - - def test_9_big_file_big_buffer(self): + sftp.remove("%s/hongry.txt" % sftp.FOLDER) + + def test_big_file_big_buffer(self, sftp): """ write a 1MB file, with no linefeeds, and a big buffer. """ - sftp = get_sftp() - mblob = (1024 * 1024 * 'x') + mblob = 1024 * 1024 * "x" try: - with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f: + with sftp.open( + "%s/hongry.txt" % sftp.FOLDER, "w", 128 * 1024 + ) as f: f.write(mblob) - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) finally: - sftp.remove('%s/hongry.txt' % FOLDER) - - def test_A_big_file_renegotiate(self): + sftp.remove("%s/hongry.txt" % sftp.FOLDER) + + def test_big_file_renegotiate(self, sftp): """ write a 1MB file, forcing key renegotiation in the middle. 
""" - sftp = get_sftp() t = sftp.sock.get_transport() t.packetizer.REKEY_BYTES = 512 * 1024 - k32blob = (32 * 1024 * 'x') + k32blob = 32 * 1024 * "x" try: - with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f: + with sftp.open( + "%s/hongry.txt" % sftp.FOLDER, "w", 128 * 1024 + ) as f: for i in range(32): f.write(k32blob) - self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024) - self.assertNotEqual(t.H, t.session_id) - + assert ( + sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024 + ) + assert t.H != t.session_id + # try to read it too. - with sftp.open('%s/hongry.txt' % FOLDER, 'r', 128 * 1024) as f: + with sftp.open( + "%s/hongry.txt" % sftp.FOLDER, "r", 128 * 1024 + ) as f: file_size = f.stat().st_size f.prefetch(file_size) total = 0 while total < 1024 * 1024: total += len(f.read(32 * 1024)) finally: - sftp.remove('%s/hongry.txt' % FOLDER) + sftp.remove("%s/hongry.txt" % sftp.FOLDER) t.packetizer.REKEY_BYTES = pow(2, 30) - - -if __name__ == '__main__': - from tests.test_sftp import SFTPTest - SFTPTest.init_loopback() - from unittest import main - main() diff --git a/tests/test_ssh_exception.py b/tests/test_ssh_exception.py index 18f2a97d2..1628986a3 100644 --- a/tests/test_ssh_exception.py +++ b/tests/test_ssh_exception.py @@ -1,31 +1,75 @@ import pickle import unittest -from paramiko.ssh_exception import NoValidConnectionsError +from paramiko import RSAKey +from paramiko.ssh_exception import ( + NoValidConnectionsError, + BadAuthenticationType, + PartialAuthentication, + ChannelException, + BadHostKeyException, + ProxyCommandFailure, +) -class NoValidConnectionsErrorTest (unittest.TestCase): - +class NoValidConnectionsErrorTest(unittest.TestCase): def test_pickling(self): # Regression test for https://github.com/paramiko/paramiko/issues/617 - exc = NoValidConnectionsError({('127.0.0.1', '22'): Exception()}) + exc = NoValidConnectionsError({("127.0.0.1", "22"): Exception()}) new_exc = pickle.loads(pickle.dumps(exc)) self.assertEqual(type(exc), type(new_exc)) self.assertEqual(str(exc), str(new_exc)) self.assertEqual(exc.args, new_exc.args) def test_error_message_for_single_host(self): - exc = NoValidConnectionsError({('127.0.0.1', '22'): Exception()}) + exc = NoValidConnectionsError({("127.0.0.1", "22"): Exception()}) assert "Unable to connect to port 22 on 127.0.0.1" in str(exc) def test_error_message_for_two_hosts(self): - exc = NoValidConnectionsError({('127.0.0.1', '22'): Exception(), - ('::1', '22'): Exception()}) + exc = NoValidConnectionsError( + {("127.0.0.1", "22"): Exception(), ("::1", "22"): Exception()} + ) assert "Unable to connect to port 22 on 127.0.0.1 or ::1" in str(exc) def test_error_message_for_multiple_hosts(self): - exc = NoValidConnectionsError({('127.0.0.1', '22'): Exception(), - ('::1', '22'): Exception(), - ('10.0.0.42', '22'): Exception()}) + exc = NoValidConnectionsError( + { + ("127.0.0.1", "22"): Exception(), + ("::1", "22"): Exception(), + ("10.0.0.42", "22"): Exception(), + } + ) exp = "Unable to connect to port 22 on 10.0.0.42, 127.0.0.1 or ::1" assert exp in str(exc) + + +class ExceptionStringDisplayTest(unittest.TestCase): + def test_BadAuthenticationType(self): + exc = BadAuthenticationType( + "Bad authentication type", ["ok", "also-ok"] + ) + expected = "Bad authentication type; allowed types: ['ok', 'also-ok']" + assert str(exc) == expected + + def test_PartialAuthentication(self): + exc = PartialAuthentication(["ok", "also-ok"]) + expected = "Partial authentication; allowed types: ['ok', 'also-ok']" + 
assert str(exc) == expected + + def test_BadHostKeyException(self): + got_key = RSAKey.generate(2048) + wanted_key = RSAKey.generate(2048) + exc = BadHostKeyException("myhost", got_key, wanted_key) + expected = "Host key for server 'myhost' does not match: got '{}', expected '{}'" # noqa + assert str(exc) == expected.format( + got_key.get_base64(), wanted_key.get_base64() + ) + + def test_ProxyCommandFailure(self): + exc = ProxyCommandFailure("man squid", 7) + expected = 'ProxyCommand("man squid") returned nonzero exit status: 7' + assert str(exc) == expected + + def test_ChannelException(self): + exc = ChannelException(17, "whatever") + assert str(exc) == "ChannelException(17, 'whatever')" diff --git a/tests/test_ssh_gss.py b/tests/test_ssh_gss.py index e20d348f8..92801c203 100644 --- a/tests/test_ssh_gss.py +++ b/tests/test_ssh_gss.py @@ -25,52 +25,59 @@ import socket import threading -import unittest import paramiko +from .util import _support, needs_gssapi, KerberosTestCase, update_env +from .test_client import FINGERPRINTS -class NullServer (paramiko.ServerInterface): +class NullServer(paramiko.ServerInterface): def get_allowed_auths(self, username): - return 'gssapi-with-mic' + return "gssapi-with-mic,publickey" - def check_auth_gssapi_with_mic(self, username, - gss_authenticated=paramiko.AUTH_FAILED, - cc_file=None): + def check_auth_gssapi_with_mic( + self, username, gss_authenticated=paramiko.AUTH_FAILED, cc_file=None + ): if gss_authenticated == paramiko.AUTH_SUCCESSFUL: return paramiko.AUTH_SUCCESSFUL return paramiko.AUTH_FAILED def enable_auth_gssapi(self): - UseGSSAPI = True - GSSAPICleanupCredentials = True - return UseGSSAPI + return True + + def check_auth_publickey(self, username, key): + try: + expected = FINGERPRINTS[key.get_name()] + except KeyError: + return paramiko.AUTH_FAILED + else: + if key.get_fingerprint() == expected: + return paramiko.AUTH_SUCCESSFUL + return paramiko.AUTH_FAILED def check_channel_request(self, kind, chanid): return paramiko.OPEN_SUCCEEDED def check_channel_exec_request(self, channel, command): - if command != 'yes': + if command != b"yes": return False return True -class GSSAuthTest(unittest.TestCase): - @staticmethod - def init(username, hostname): - global krb5_principal, targ_name - krb5_principal = username - targ_name = hostname - +@needs_gssapi +class GSSAuthTest(KerberosTestCase): def setUp(self): - self.username = krb5_principal - self.hostname = socket.getfqdn(targ_name) + # TODO: username and targ_name should come from os.environ or whatever + # the approved pytest method is for runtime-configuring test data. 
+ self.username = self.realm.user_princ + self.hostname = socket.getfqdn(self.realm.hostname) self.sockl = socket.socket() - self.sockl.bind((targ_name, 0)) + self.sockl.bind((self.realm.hostname, 0)) self.sockl.listen(1) self.addr, self.port = self.sockl.getsockname() self.event = threading.Event() + update_env(self, self.realm.env) thread = threading.Thread(target=self._run) thread.start() @@ -82,24 +89,33 @@ def tearDown(self): def _run(self): self.socks, addr = self.sockl.accept() self.ts = paramiko.Transport(self.socks) - host_key = paramiko.RSAKey.from_private_key_file('tests/test_rsa.key') + host_key = paramiko.RSAKey.from_private_key_file("tests/test_rsa.key") self.ts.add_server_key(host_key) server = NullServer() self.ts.start_server(self.event, server) - def test_1_gss_auth(self): + def _test_connection(self, **kwargs): """ - Verify that Paramiko can handle SSHv2 GSS-API / SSPI authentication - (gssapi-with-mic) in client and server mode. + (Most) kwargs get passed directly into SSHClient.connect(). + + The exception is ... no exception yet """ - host_key = paramiko.RSAKey.from_private_key_file('tests/test_rsa.key') + host_key = paramiko.RSAKey.from_private_key_file("tests/test_rsa.key") public_host_key = paramiko.RSAKey(data=host_key.asbytes()) self.tc = paramiko.SSHClient() - self.tc.get_host_keys().add('[%s]:%d' % (self.hostname, self.port), - 'ssh-rsa', public_host_key) - self.tc.connect(self.hostname, self.port, username=self.username, - gss_auth=True) + self.tc.set_missing_host_key_policy(paramiko.WarningPolicy()) + self.tc.get_host_keys().add( + "[%s]:%d" % (self.addr, self.port), "ssh-rsa", public_host_key + ) + self.tc.connect( + hostname=self.addr, + port=self.port, + username=self.username, + gss_host=self.hostname, + gss_auth=True, + **kwargs + ) self.event.wait(1.0) self.assert_(self.event.is_set()) @@ -107,18 +123,38 @@ def test_1_gss_auth(self): self.assertEquals(self.username, self.ts.get_username()) self.assertEquals(True, self.ts.is_authenticated()) - stdin, stdout, stderr = self.tc.exec_command('yes') + stdin, stdout, stderr = self.tc.exec_command("yes") schan = self.ts.accept(1.0) - schan.send('Hello there.\n') - schan.send_stderr('This is on stderr.\n') + schan.send("Hello there.\n") + schan.send_stderr("This is on stderr.\n") schan.close() - self.assertEquals('Hello there.\n', stdout.readline()) - self.assertEquals('', stdout.readline()) - self.assertEquals('This is on stderr.\n', stderr.readline()) - self.assertEquals('', stderr.readline()) + self.assertEquals("Hello there.\n", stdout.readline()) + self.assertEquals("", stdout.readline()) + self.assertEquals("This is on stderr.\n", stderr.readline()) + self.assertEquals("", stderr.readline()) stdin.close() stdout.close() stderr.close() + + def test_gss_auth(self): + """ + Verify that Paramiko can handle SSHv2 GSS-API / SSPI authentication + (gssapi-with-mic) in client and server mode. 
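+ (agent and key-file lookups are disabled so that only the GSS-API
+ mechanism is exercised)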
+ """ + self._test_connection(allow_agent=False, look_for_keys=False) + + def test_auth_trickledown(self): + """ + Failed gssapi-with-mic doesn't prevent subsequent key from succeeding + """ + self.hostname = ( + "this_host_does_not_exists_and_causes_a_GSSAPI-exception" + ) + self._test_connection( + key_filename=[_support("test_rsa.key")], + allow_agent=False, + look_for_keys=False, + ) diff --git a/tests/test_transport.py b/tests/test_transport.py index d81ad8f3a..77ffd6c1f 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -23,25 +23,46 @@ from __future__ import with_statement from binascii import hexlify +from contextlib import contextmanager import select import socket import time import threading import random -from hashlib import sha1 import unittest - -from paramiko import Transport, SecurityOptions, ServerInterface, RSAKey, DSSKey, \ - SSHException, ChannelException, Packetizer +from mock import Mock + +from paramiko import ( + AuthHandler, + ChannelException, + DSSKey, + Packetizer, + RSAKey, + SSHException, + AuthenticationException, + IncompatiblePeer, + SecurityOptions, + ServerInterface, + Transport, +) from paramiko import AUTH_FAILED, AUTH_SUCCESSFUL from paramiko import OPEN_SUCCEEDED, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED -from paramiko.common import MSG_KEXINIT, cMSG_CHANNEL_WINDOW_ADJUST, \ - MIN_PACKET_SIZE, MIN_WINDOW_SIZE, MAX_WINDOW_SIZE, \ - DEFAULT_WINDOW_SIZE, DEFAULT_MAX_PACKET_SIZE -from paramiko.py3compat import bytes +from paramiko.common import ( + DEFAULT_MAX_PACKET_SIZE, + DEFAULT_WINDOW_SIZE, + MAX_WINDOW_SIZE, + MIN_PACKET_SIZE, + MIN_WINDOW_SIZE, + MSG_KEXINIT, + MSG_USERAUTH_SUCCESS, + cMSG_CHANNEL_WINDOW_ADJUST, + cMSG_UNIMPLEMENTED, +) +from paramiko.py3compat import bytes, byte_chr from paramiko.message import Message -from tests.loop import LoopSocket -from tests.util import test_path + +from .util import needs_builtin, _support, slow +from .loop import LoopSocket LONG_BANNER = """\ @@ -57,28 +78,36 @@ """ -class NullServer (ServerInterface): +class NullServer(ServerInterface): paranoid_did_password = False paranoid_did_public_key = False - paranoid_key = DSSKey.from_private_key_file(test_path('test_dss.key')) + paranoid_key = DSSKey.from_private_key_file(_support("test_dss.key")) + + def __init__(self, allowed_keys=None): + self.allowed_keys = allowed_keys if allowed_keys is not None else [] def get_allowed_auths(self, username): - if username == 'slowdive': - return 'publickey,password' - return 'publickey' + if username == "slowdive": + return "publickey,password" + return "publickey" def check_auth_password(self, username, password): - if (username == 'slowdive') and (password == 'pygmalion'): + if (username == "slowdive") and (password == "pygmalion"): + return AUTH_SUCCESSFUL + return AUTH_FAILED + + def check_auth_publickey(self, username, key): + if key in self.allowed_keys: return AUTH_SUCCESSFUL return AUTH_FAILED def check_channel_request(self, kind, chanid): - if kind == 'bogus': + if kind == "bogus": return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED return OPEN_SUCCEEDED def check_channel_exec_request(self, channel, command): - if command != b'yes': + if command != b"yes": return False return True @@ -87,9 +116,20 @@ def check_channel_shell_request(self, channel): def check_global_request(self, kind, msg): self._global_request = kind - return False - - def check_channel_x11_request(self, channel, single_connection, auth_protocol, auth_cookie, screen_number): + # NOTE: for w/e reason, older impl of this returned False 
always, even + # tho that's only supposed to occur if the request cannot be served. + # For now, leaving that the default unless test supplies specific + # 'acceptable' request kind + return kind == "acceptable" + + def check_channel_x11_request( + self, + channel, + single_connection, + auth_protocol, + auth_cookie, + screen_number, + ): self._x11_single_connection = single_connection self._x11_auth_protocol = auth_protocol self._x11_auth_cookie = auth_cookie @@ -98,7 +138,7 @@ def check_channel_x11_request(self, channel, single_connection, auth_protocol, a def check_port_forward_request(self, addr, port): self._listen = socket.socket() - self._listen.bind(('127.0.0.1', 0)) + self._listen.bind(("127.0.0.1", 0)) self._listen.listen(1) return self._listen.getsockname()[1] @@ -125,8 +165,11 @@ def tearDown(self): self.socks.close() self.sockc.close() - def setup_test_server(self, client_options=None, server_options=None): - host_key = RSAKey.from_private_key_file(test_path('test_rsa.key')) + # TODO: unify with newer contextmanager + def setup_test_server( + self, client_options=None, server_options=None, connect_kwargs=None + ): + host_key = RSAKey.from_private_key_file(_support("test_rsa.key")) public_host_key = RSAKey(data=host_key.asbytes()) self.ts.add_server_key(host_key) @@ -139,20 +182,25 @@ def setup_test_server(self, client_options=None, server_options=None): self.server = NullServer() self.assertTrue(not event.is_set()) self.ts.start_server(event, self.server) - self.tc.connect(hostkey=public_host_key, - username='slowdive', password='pygmalion') + if connect_kwargs is None: + connect_kwargs = dict( + hostkey=public_host_key, + username="slowdive", + password="pygmalion", + ) + self.tc.connect(**connect_kwargs) event.wait(1.0) self.assertTrue(event.is_set()) self.assertTrue(self.ts.is_active()) - def test_1_security_options(self): + def test_security_options(self): o = self.tc.get_security_options() self.assertEqual(type(o), SecurityOptions) - self.assertTrue(('aes256-cbc', 'blowfish-cbc') != o.ciphers) - o.ciphers = ('aes256-cbc', 'blowfish-cbc') - self.assertEqual(('aes256-cbc', 'blowfish-cbc'), o.ciphers) + self.assertTrue(("aes256-cbc", "blowfish-cbc") != o.ciphers) + o.ciphers = ("aes256-cbc", "blowfish-cbc") + self.assertEqual(("aes256-cbc", "blowfish-cbc"), o.ciphers) try: - o.ciphers = ('aes256-cbc', 'made-up-cipher') + o.ciphers = ("aes256-cbc", "made-up-cipher") self.assertTrue(False) except ValueError: pass @@ -162,21 +210,32 @@ def test_1_security_options(self): except TypeError: pass - def test_2_compute_key(self): - self.tc.K = 123281095979686581523377256114209720774539068973101330872763622971399429481072519713536292772709507296759612401802191955568143056534122385270077606457721553469730659233569339356140085284052436697480759510519672848743794433460113118986816826624865291116513647975790797391795651716378444844877749505443714557929 - self.tc.H = b'\x0C\x83\x07\xCD\xE6\x85\x6F\xF3\x0B\xA9\x36\x84\xEB\x0F\x04\xC2\x52\x0E\x9E\xD3' + def testb_security_options_reset(self): + o = self.tc.get_security_options() + # should not throw any exceptions + o.ciphers = o.ciphers + o.digests = o.digests + o.key_types = o.key_types + o.kex = o.kex + o.compression = o.compression + + def test_compute_key(self): + self.tc.K = 
123281095979686581523377256114209720774539068973101330872763622971399429481072519713536292772709507296759612401802191955568143056534122385270077606457721553469730659233569339356140085284052436697480759510519672848743794433460113118986816826624865291116513647975790797391795651716378444844877749505443714557929 # noqa + self.tc.H = b"\x0C\x83\x07\xCD\xE6\x85\x6F\xF3\x0B\xA9\x36\x84\xEB\x0F\x04\xC2\x52\x0E\x9E\xD3" # noqa self.tc.session_id = self.tc.H - key = self.tc._compute_key('C', 32) - self.assertEqual(b'207E66594CA87C44ECCBA3B3CD39FDDB378E6FDB0F97C54B2AA0CFBF900CD995', - hexlify(key).upper()) + key = self.tc._compute_key("C", 32) + self.assertEqual( + b"207E66594CA87C44ECCBA3B3CD39FDDB378E6FDB0F97C54B2AA0CFBF900CD995", # noqa + hexlify(key).upper(), + ) - def test_3_simple(self): + def test_simple(self): """ verify that we can establish an ssh link with ourselves across the loopback sockets. this is hardly "simple" but it's simpler than the later tests. :) """ - host_key = RSAKey.from_private_key_file(test_path('test_rsa.key')) + host_key = RSAKey.from_private_key_file(_support("test_rsa.key")) public_host_key = RSAKey(data=host_key.asbytes()) self.ts.add_server_key(host_key) event = threading.Event() @@ -187,21 +246,22 @@ def test_3_simple(self): self.assertEqual(False, self.tc.is_authenticated()) self.assertEqual(False, self.ts.is_authenticated()) self.ts.start_server(event, server) - self.tc.connect(hostkey=public_host_key, - username='slowdive', password='pygmalion') + self.tc.connect( + hostkey=public_host_key, username="slowdive", password="pygmalion" + ) event.wait(1.0) self.assertTrue(event.is_set()) self.assertTrue(self.ts.is_active()) - self.assertEqual('slowdive', self.tc.get_username()) - self.assertEqual('slowdive', self.ts.get_username()) + self.assertEqual("slowdive", self.tc.get_username()) + self.assertEqual("slowdive", self.ts.get_username()) self.assertEqual(True, self.tc.is_authenticated()) self.assertEqual(True, self.ts.is_authenticated()) - def test_3a_long_banner(self): + def test_long_banner(self): """ verify that a long banner doesn't mess up the handshake. """ - host_key = RSAKey.from_private_key_file(test_path('test_rsa.key')) + host_key = RSAKey.from_private_key_file(_support("test_rsa.key")) public_host_key = RSAKey(data=host_key.asbytes()) self.ts.add_server_key(host_key) event = threading.Event() @@ -209,23 +269,26 @@ def test_3a_long_banner(self): self.assertTrue(not event.is_set()) self.socks.send(LONG_BANNER) self.ts.start_server(event, server) - self.tc.connect(hostkey=public_host_key, - username='slowdive', password='pygmalion') + self.tc.connect( + hostkey=public_host_key, username="slowdive", password="pygmalion" + ) event.wait(1.0) self.assertTrue(event.is_set()) self.assertTrue(self.ts.is_active()) - def test_4_special(self): + def test_special(self): """ verify that the client can demand odd handshake settings, and can renegotiate keys in mid-stream. 
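(the client forces aes256-cbc and hmac-md5-96, then calls
renegotiate_keys() on the established session)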
""" + def force_algorithms(options): - options.ciphers = ('aes256-cbc',) - options.digests = ('hmac-md5-96',) + options.ciphers = ("aes256-cbc",) + options.digests = ("hmac-md5-96",) + self.setup_test_server(client_options=force_algorithms) - self.assertEqual('aes256-cbc', self.tc.local_cipher) - self.assertEqual('aes256-cbc', self.tc.remote_cipher) + self.assertEqual("aes256-cbc", self.tc.local_cipher) + self.assertEqual("aes256-cbc", self.tc.remote_cipher) self.assertEqual(12, self.tc.packetizer.get_mac_size_out()) self.assertEqual(12, self.tc.packetizer.get_mac_size_in()) @@ -233,17 +296,18 @@ def force_algorithms(options): self.tc.renegotiate_keys() self.ts.send_ignore(1024) - def test_5_keepalive(self): + @slow + def test_keepalive(self): """ verify that the keepalive will be sent. """ self.setup_test_server() - self.assertEqual(None, getattr(self.server, '_global_request', None)) + self.assertEqual(None, getattr(self.server, "_global_request", None)) self.tc.set_keepalive(1) time.sleep(2) - self.assertEqual('keepalive@lag.net', self.server._global_request) + self.assertEqual("keepalive@lag.net", self.server._global_request) - def test_6_exec_command(self): + def test_exec_command(self): """ verify that exec_command() does something reasonable. """ @@ -252,40 +316,42 @@ def test_6_exec_command(self): chan = self.tc.open_session() schan = self.ts.accept(1.0) try: - chan.exec_command(b'command contains \xfc and is not a valid UTF-8 string') + chan.exec_command( + b"command contains \xfc and is not a valid UTF-8 string" + ) self.assertTrue(False) except SSHException: pass chan = self.tc.open_session() - chan.exec_command('yes') + chan.exec_command("yes") schan = self.ts.accept(1.0) - schan.send('Hello there.\n') - schan.send_stderr('This is on stderr.\n') + schan.send("Hello there.\n") + schan.send_stderr("This is on stderr.\n") schan.close() f = chan.makefile() - self.assertEqual('Hello there.\n', f.readline()) - self.assertEqual('', f.readline()) + self.assertEqual("Hello there.\n", f.readline()) + self.assertEqual("", f.readline()) f = chan.makefile_stderr() - self.assertEqual('This is on stderr.\n', f.readline()) - self.assertEqual('', f.readline()) + self.assertEqual("This is on stderr.\n", f.readline()) + self.assertEqual("", f.readline()) # now try it with combined stdout/stderr chan = self.tc.open_session() - chan.exec_command('yes') + chan.exec_command("yes") schan = self.ts.accept(1.0) - schan.send('Hello there.\n') - schan.send_stderr('This is on stderr.\n') + schan.send("Hello there.\n") + schan.send_stderr("This is on stderr.\n") schan.close() chan.set_combine_stderr(True) f = chan.makefile() - self.assertEqual('Hello there.\n', f.readline()) - self.assertEqual('This is on stderr.\n', f.readline()) - self.assertEqual('', f.readline()) - - def test_6a_channel_can_be_used_as_context_manager(self): + self.assertEqual("Hello there.\n", f.readline()) + self.assertEqual("This is on stderr.\n", f.readline()) + self.assertEqual("", f.readline()) + + def test_channel_can_be_used_as_context_manager(self): """ verify that exec_command() does something reasonable. 
""" @@ -293,15 +359,15 @@ def test_6a_channel_can_be_used_as_context_manager(self): with self.tc.open_session() as chan: with self.ts.accept(1.0) as schan: - chan.exec_command('yes') - schan.send('Hello there.\n') + chan.exec_command("yes") + schan.send("Hello there.\n") schan.close() f = chan.makefile() - self.assertEqual('Hello there.\n', f.readline()) - self.assertEqual('', f.readline()) + self.assertEqual("Hello there.\n", f.readline()) + self.assertEqual("", f.readline()) - def test_7_invoke_shell(self): + def test_invoke_shell(self): """ verify that invoke_shell() does something reasonable. """ @@ -309,24 +375,24 @@ def test_7_invoke_shell(self): chan = self.tc.open_session() chan.invoke_shell() schan = self.ts.accept(1.0) - chan.send('communist j. cat\n') + chan.send("communist j. cat\n") f = schan.makefile() - self.assertEqual('communist j. cat\n', f.readline()) + self.assertEqual("communist j. cat\n", f.readline()) chan.close() - self.assertEqual('', f.readline()) + self.assertEqual("", f.readline()) - def test_8_channel_exception(self): + def test_channel_exception(self): """ verify that ChannelException is thrown for a bad open-channel request. """ self.setup_test_server() try: - chan = self.tc.open_channel('bogus') - self.fail('expected exception') + self.tc.open_channel("bogus") + self.fail("expected exception") except ChannelException as e: self.assertTrue(e.code == OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED) - def test_9_exit_status(self): + def test_exit_status(self): """ verify that get_exit_status() works. """ @@ -334,8 +400,8 @@ def test_9_exit_status(self): chan = self.tc.open_session() schan = self.ts.accept(1.0) - chan.exec_command('yes') - schan.send('Hello there.\n') + chan.exec_command("yes") + schan.send("Hello there.\n") self.assertTrue(not chan.exit_status_ready()) # trigger an EOF schan.shutdown_read() @@ -344,8 +410,8 @@ def test_9_exit_status(self): schan.close() f = chan.makefile() - self.assertEqual('Hello there.\n', f.readline()) - self.assertEqual('', f.readline()) + self.assertEqual("Hello there.\n", f.readline()) + self.assertEqual("", f.readline()) count = 0 while not chan.exit_status_ready(): time.sleep(0.1) @@ -355,7 +421,7 @@ def test_9_exit_status(self): self.assertEqual(23, chan.recv_exit_status()) chan.close() - def test_A_select(self): + def test_select(self): """ verify that select() on a channel works. """ @@ -370,7 +436,7 @@ def test_A_select(self): self.assertEqual([], w) self.assertEqual([], e) - schan.send('hello\n') + schan.send("hello\n") # something should be ready now (give it 1 second to appear) for i in range(10): @@ -382,7 +448,7 @@ def test_A_select(self): self.assertEqual([], w) self.assertEqual([], e) - self.assertEqual(b'hello\n', chan.recv(6)) + self.assertEqual(b"hello\n", chan.recv(6)) # and, should be dead again now r, w, e = select.select([chan], [], [], 0.1) @@ -410,19 +476,19 @@ def test_A_select(self): # ...and now is closed. self.assertEqual(True, p._closed) - def test_B_renegotiate(self): + def test_renegotiate(self): """ verify that a transport can correctly renegotiate mid-stream. 
""" self.setup_test_server() self.tc.packetizer.REKEY_BYTES = 16384 chan = self.tc.open_session() - chan.exec_command('yes') + chan.exec_command("yes") schan = self.ts.accept(1.0) self.assertEqual(self.tc.H, self.tc.session_id) for i in range(20): - chan.send('x' * 1024) + chan.send("x" * 1024) chan.close() # allow a few seconds for the rekeying to complete @@ -434,131 +500,142 @@ def test_B_renegotiate(self): schan.close() - def test_C_compression(self): + def test_compression(self): """ verify that zlib compression is basically working. """ + def force_compression(o): - o.compression = ('zlib',) + o.compression = ("zlib",) + self.setup_test_server(force_compression, force_compression) chan = self.tc.open_session() - chan.exec_command('yes') + chan.exec_command("yes") schan = self.ts.accept(1.0) bytes = self.tc.packetizer._Packetizer__sent_bytes - chan.send('x' * 1024) + chan.send("x" * 1024) bytes2 = self.tc.packetizer._Packetizer__sent_bytes - block_size = self.tc._cipher_info[self.tc.local_cipher]['block-size'] - mac_size = self.tc._mac_info[self.tc.local_mac]['size'] - # tests show this is actually compressed to *52 bytes*! including packet overhead! nice!! :) + block_size = self.tc._cipher_info[self.tc.local_cipher]["block-size"] + mac_size = self.tc._mac_info[self.tc.local_mac]["size"] + # tests show this is actually compressed to *52 bytes*! including + # packet overhead! nice!! :) self.assertTrue(bytes2 - bytes < 1024) self.assertEqual(16 + block_size + mac_size, bytes2 - bytes) chan.close() schan.close() - def test_D_x11(self): + def test_x11(self): """ verify that an x11 port can be requested and opened. """ self.setup_test_server() chan = self.tc.open_session() - chan.exec_command('yes') + chan.exec_command("yes") schan = self.ts.accept(1.0) requested = [] + def handler(c, addr_port): addr, port = addr_port requested.append((addr, port)) self.tc._queue_incoming_channel(c) - self.assertEqual(None, getattr(self.server, '_x11_screen_number', None)) + self.assertEqual( + None, getattr(self.server, "_x11_screen_number", None) + ) cookie = chan.request_x11(0, single_connection=True, handler=handler) self.assertEqual(0, self.server._x11_screen_number) - self.assertEqual('MIT-MAGIC-COOKIE-1', self.server._x11_auth_protocol) + self.assertEqual("MIT-MAGIC-COOKIE-1", self.server._x11_auth_protocol) self.assertEqual(cookie, self.server._x11_auth_cookie) self.assertEqual(True, self.server._x11_single_connection) - x11_server = self.ts.open_x11_channel(('localhost', 6093)) + x11_server = self.ts.open_x11_channel(("localhost", 6093)) x11_client = self.tc.accept() - self.assertEqual('localhost', requested[0][0]) + self.assertEqual("localhost", requested[0][0]) self.assertEqual(6093, requested[0][1]) - x11_server.send('hello') - self.assertEqual(b'hello', x11_client.recv(5)) + x11_server.send("hello") + self.assertEqual(b"hello", x11_client.recv(5)) x11_server.close() x11_client.close() chan.close() schan.close() - def test_E_reverse_port_forwarding(self): + def test_reverse_port_forwarding(self): """ verify that a client can ask the server to open a reverse port for forwarding. 
""" self.setup_test_server() chan = self.tc.open_session() - chan.exec_command('yes') - schan = self.ts.accept(1.0) + chan.exec_command("yes") + self.ts.accept(1.0) requested = [] + def handler(c, origin_addr_port, server_addr_port): requested.append(origin_addr_port) requested.append(server_addr_port) self.tc._queue_incoming_channel(c) - port = self.tc.request_port_forward('127.0.0.1', 0, handler) + port = self.tc.request_port_forward("127.0.0.1", 0, handler) self.assertEqual(port, self.server._listen.getsockname()[1]) cs = socket.socket() - cs.connect(('127.0.0.1', port)) + cs.connect(("127.0.0.1", port)) ss, _ = self.server._listen.accept() - sch = self.ts.open_forwarded_tcpip_channel(ss.getsockname(), ss.getpeername()) + sch = self.ts.open_forwarded_tcpip_channel( + ss.getsockname(), ss.getpeername() + ) cch = self.tc.accept() - sch.send('hello') - self.assertEqual(b'hello', cch.recv(5)) + sch.send("hello") + self.assertEqual(b"hello", cch.recv(5)) sch.close() cch.close() ss.close() cs.close() # now cancel it. - self.tc.cancel_port_forward('127.0.0.1', port) + self.tc.cancel_port_forward("127.0.0.1", port) self.assertTrue(self.server._listen is None) - def test_F_port_forwarding(self): + def test_port_forwarding(self): """ verify that a client can forward new connections from a locally- forwarded port. """ self.setup_test_server() chan = self.tc.open_session() - chan.exec_command('yes') - schan = self.ts.accept(1.0) + chan.exec_command("yes") + self.ts.accept(1.0) # open a port on the "server" that the client will ask to forward to. greeting_server = socket.socket() - greeting_server.bind(('127.0.0.1', 0)) + greeting_server.bind(("127.0.0.1", 0)) greeting_server.listen(1) greeting_port = greeting_server.getsockname()[1] - cs = self.tc.open_channel('direct-tcpip', ('127.0.0.1', greeting_port), ('', 9000)) + cs = self.tc.open_channel( + "direct-tcpip", ("127.0.0.1", greeting_port), ("", 9000) + ) sch = self.ts.accept(1.0) cch = socket.socket() cch.connect(self.server._tcpip_dest) ss, _ = greeting_server.accept() - ss.send(b'Hello!\n') + ss.send(b"Hello!\n") ss.close() sch.send(cch.recv(8192)) sch.close() - self.assertEqual(b'Hello!\n', cs.recv(7)) + self.assertEqual(b"Hello!\n", cs.recv(7)) cs.close() - def test_G_stderr_select(self): + def test_stderr_select(self): """ verify that select() on a channel works even if only stderr is receiving data. @@ -574,7 +651,7 @@ def test_G_stderr_select(self): self.assertEqual([], w) self.assertEqual([], e) - schan.send_stderr('hello\n') + schan.send_stderr("hello\n") # something should be ready now (give it 1 second to appear) for i in range(10): @@ -586,7 +663,7 @@ def test_G_stderr_select(self): self.assertEqual([], w) self.assertEqual([], e) - self.assertEqual(b'hello\n', chan.recv_stderr(6)) + self.assertEqual(b"hello\n", chan.recv_stderr(6)) # and, should be dead again now r, w, e = select.select([chan], [], [], 0.1) @@ -597,7 +674,7 @@ def test_G_stderr_select(self): schan.close() chan.close() - def test_H_send_ready(self): + def test_send_ready(self): """ verify that send_ready() indicates when a send would not block. 
""" @@ -608,8 +685,8 @@ def test_H_send_ready(self): self.assertEqual(chan.send_ready(), True) total = 0 - K = '*' * 1024 - limit = 1+(64 * 2 ** 15) + K = "*" * 1024 + limit = 1 + (64 * 2 ** 15) while total < limit: chan.send(K) total += len(K) @@ -621,9 +698,10 @@ def test_H_send_ready(self): chan.close() self.assertEqual(chan.send_ready(), True) - def test_I_rekey_deadlock(self): + def test_rekey_deadlock(self): """ - Regression test for deadlock when in-transit messages are received after MSG_KEXINIT is sent + Regression test for deadlock when in-transit messages are received + after MSG_KEXINIT is sent Note: When this test fails, it may leak threads. """ @@ -646,12 +724,15 @@ def test_I_rekey_deadlock(self): # MSG_KEXINIT to the remote host. # # On the remote host (using any SSH implementation): - # 5. The MSG_CHANNEL_DATA is received, and MSG_CHANNEL_WINDOW_ADJUST is sent. - # 6. The MSG_KEXINIT is received, and a corresponding MSG_KEXINIT is sent. + # 5. The MSG_CHANNEL_DATA is received, and MSG_CHANNEL_WINDOW_ADJUST + # is sent. + # 6. The MSG_KEXINIT is received, and a corresponding MSG_KEXINIT is + # sent. # # In the main thread: # 7. The user's program calls Channel.send(). - # 8. Channel.send acquires Channel.lock, then calls Transport._send_user_message(). + # 8. Channel.send acquires Channel.lock, then calls + # Transport._send_user_message(). # 9. Transport._send_user_message waits for Transport.clear_to_send # to be set (i.e., it waits for re-keying to complete). # Channel.lock is still held. @@ -672,7 +753,9 @@ def test_I_rekey_deadlock(self): class SendThread(threading.Thread): def __init__(self, chan, iterations, done_event): - threading.Thread.__init__(self, None, None, self.__class__.__name__) + threading.Thread.__init__( + self, None, None, self.__class__.__name__ + ) self.setDaemon(True) self.chan = chan self.iterations = iterations @@ -682,11 +765,11 @@ def __init__(self, chan, iterations, done_event): def run(self): try: - for i in range(1, 1+self.iterations): + for i in range(1, 1 + self.iterations): if self.done_event.is_set(): break self.watchdog_event.set() - #print i, "SEND" + # print i, "SEND" self.chan.send("x" * 2048) finally: self.done_event.set() @@ -694,7 +777,9 @@ def run(self): class ReceiveThread(threading.Thread): def __init__(self, chan, done_event): - threading.Thread.__init__(self, None, None, self.__class__.__name__) + threading.Thread.__init__( + self, None, None, self.__class__.__name__ + ) self.setDaemon(True) self.chan = chan self.done_event = done_event @@ -717,30 +802,34 @@ def run(self): self.ts.packetizer.REKEY_BYTES = 2048 chan = self.tc.open_session() - chan.exec_command('yes') + chan.exec_command("yes") schan = self.ts.accept(1.0) # Monkey patch the client's Transport._handler_table so that the client # sends MSG_CHANNEL_WINDOW_ADJUST whenever it receives an initial # MSG_KEXINIT. This is used to simulate the effect of network latency # on a real MSG_CHANNEL_WINDOW_ADJUST message. - self.tc._handler_table = self.tc._handler_table.copy() # copy per-class dictionary + self.tc._handler_table = ( + self.tc._handler_table.copy() + ) # copy per-class dictionary _negotiate_keys = self.tc._handler_table[MSG_KEXINIT] + def _negotiate_keys_wrapper(self, m): - if self.local_kex_init is None: # Remote side sent KEXINIT + if self.local_kex_init is None: # Remote side sent KEXINIT # Simulate in-transit MSG_CHANNEL_WINDOW_ADJUST by sending it # before responding to the incoming MSG_KEXINIT. 
m2 = Message() m2.add_byte(cMSG_CHANNEL_WINDOW_ADJUST) m2.add_int(chan.remote_chanid) - m2.add_int(1) # bytes to add + m2.add_int(1) # bytes to add self._send_message(m2) return _negotiate_keys(self, m) + self.tc._handler_table[MSG_KEXINIT] = _negotiate_keys_wrapper # Parameters for the test - iterations = 500 # The deadlock does not happen every time, but it - # should after many iterations. + iterations = 500 # The deadlock does not happen every time, but it + # should after many iterations. timeout = 5 # This event is set when the test is completed @@ -778,25 +867,30 @@ def _negotiate_keys_wrapper(self, m): schan.close() chan.close() - def test_J_sanitze_packet_size(self): + def test_sanitze_packet_size(self): """ verify that we conform to the rfc of packet and window sizes. """ - for val, correct in [(4095, MIN_PACKET_SIZE), - (None, DEFAULT_MAX_PACKET_SIZE), - (2**32, MAX_WINDOW_SIZE)]: + for val, correct in [ + (4095, MIN_PACKET_SIZE), + (None, DEFAULT_MAX_PACKET_SIZE), + (2 ** 32, MAX_WINDOW_SIZE), + ]: self.assertEqual(self.tc._sanitize_packet_size(val), correct) - def test_K_sanitze_window_size(self): + def test_sanitze_window_size(self): """ verify that we conform to the rfc of packet and window sizes. """ - for val, correct in [(32767, MIN_WINDOW_SIZE), - (None, DEFAULT_WINDOW_SIZE), - (2**32, MAX_WINDOW_SIZE)]: + for val, correct in [ + (32767, MIN_WINDOW_SIZE), + (None, DEFAULT_WINDOW_SIZE), + (2 ** 32, MAX_WINDOW_SIZE), + ]: self.assertEqual(self.tc._sanitize_window_size(val), correct) - def test_L_handshake_timeout(self): + @slow + def test_handshake_timeout(self): """ verify that we can get a hanshake timeout. """ @@ -811,12 +905,13 @@ class SlowPacketizer(Packetizer): def read_message(self): time.sleep(1) return super(SlowPacketizer, self).read_message() + # NOTE: prettttty sure since the replaced .packetizer Packetizer is now # no longer doing anything with its copy of the socket...everything'll # be fine. Even tho it's a bit squicky. self.tc.packetizer = SlowPacketizer(self.tc.sock) # Continue with regular test red tape. - host_key = RSAKey.from_private_key_file(test_path('test_rsa.key')) + host_key = RSAKey.from_private_key_file(_support("test_rsa.key")) public_host_key = RSAKey(data=host_key.asbytes()) self.ts.add_server_key(host_key) event = threading.Event() @@ -824,12 +919,15 @@ def read_message(self): self.assertTrue(not event.is_set()) self.tc.handshake_timeout = 0.000000000001 self.ts.start_server(event, server) - self.assertRaises(EOFError, self.tc.connect, - hostkey=public_host_key, - username='slowdive', - password='pygmalion') - - def test_M_select_after_close(self): + self.assertRaises( + EOFError, + self.tc.connect, + hostkey=public_host_key, + username="slowdive", + password="pygmalion", + ) + + def test_select_after_close(self): """ verify that select works when a channel is already closed. 
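(a closed channel should show up as readable immediately, i.e. select()
reports the EOF condition instead of blocking)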
""" @@ -846,3 +944,510 @@ def test_M_select_after_close(self): self.assertEqual([chan], r) self.assertEqual([], w) self.assertEqual([], e) + + def test_channel_send_misc(self): + """ + verify behaviours sending various instances to a channel + """ + self.setup_test_server() + text = u"\xa7 slice me nicely" + with self.tc.open_session() as chan: + schan = self.ts.accept(1.0) + if schan is None: + self.fail("Test server transport failed to accept") + sfile = schan.makefile() + + # TypeError raised on non string or buffer type + self.assertRaises(TypeError, chan.send, object()) + self.assertRaises(TypeError, chan.sendall, object()) + + # sendall() accepts a unicode instance + chan.sendall(text) + expected = text.encode("utf-8") + self.assertEqual(sfile.read(len(expected)), expected) + + @needs_builtin("buffer") + def test_channel_send_buffer(self): + """ + verify sending buffer instances to a channel + """ + self.setup_test_server() + data = 3 * b"some test data\n whole" + with self.tc.open_session() as chan: + schan = self.ts.accept(1.0) + if schan is None: + self.fail("Test server transport failed to accept") + sfile = schan.makefile() + + # send() accepts buffer instances + sent = 0 + while sent < len(data): + sent += chan.send(buffer(data, sent, 8)) # noqa + self.assertEqual(sfile.read(len(data)), data) + + # sendall() accepts a buffer instance + chan.sendall(buffer(data)) # noqa + self.assertEqual(sfile.read(len(data)), data) + + @needs_builtin("memoryview") + def test_channel_send_memoryview(self): + """ + verify sending memoryview instances to a channel + """ + self.setup_test_server() + data = 3 * b"some test data\n whole" + with self.tc.open_session() as chan: + schan = self.ts.accept(1.0) + if schan is None: + self.fail("Test server transport failed to accept") + sfile = schan.makefile() + + # send() accepts memoryview slices + sent = 0 + view = memoryview(data) + while sent < len(view): + sent += chan.send(view[sent : sent + 8]) + self.assertEqual(sfile.read(len(data)), data) + + # sendall() accepts a memoryview instance + chan.sendall(memoryview(data)) + self.assertEqual(sfile.read(len(data)), data) + + def test_server_rejects_open_channel_without_auth(self): + try: + self.setup_test_server(connect_kwargs={}) + self.tc.open_session() + except ChannelException as e: + assert e.code == OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED + else: + assert False, "Did not raise ChannelException!" + + def test_server_rejects_arbitrary_global_request_without_auth(self): + self.setup_test_server(connect_kwargs={}) + # NOTE: this dummy global request kind would normally pass muster + # from the test server. + self.tc.global_request("acceptable") + # Global requests never raise exceptions, even on failure (not sure why + # this was the original design...ugh.) Best we can do to tell failure + # happened is that the client transport's global_response was set back + # to None; if it had succeeded, it would be the response Message. + err = "Unauthed global response incorrectly succeeded!" + assert self.tc.global_response is None, err + + def test_server_rejects_port_forward_without_auth(self): + # NOTE: at protocol level port forward requests are treated same as a + # regular global request, but Paramiko server implements a special-case + # method for it, so it gets its own test. (plus, THAT actually raises + # an exception on the client side, unlike the general case...) 
+ self.setup_test_server(connect_kwargs={}) + try: + self.tc.request_port_forward("localhost", 1234) + except SSHException as e: + assert "forwarding request denied" in str(e) + else: + assert False, "Did not raise SSHException!" + + def _send_unimplemented(self, server_is_sender): + self.setup_test_server() + sender, recipient = self.tc, self.ts + if server_is_sender: + sender, recipient = self.ts, self.tc + recipient._send_message = Mock() + msg = Message() + msg.add_byte(cMSG_UNIMPLEMENTED) + sender._send_message(msg) + # TODO: I hate this but I literally don't see a good way to know when + # the recipient has received the sender's message (there are no + # existing threading events in play that work for this), esp in this + # case where we don't WANT a response (as otherwise we could + # potentially try blocking on the sender's receipt of a reply...maybe). + time.sleep(0.1) + assert not recipient._send_message.called + + def test_server_does_not_respond_to_MSG_UNIMPLEMENTED(self): + self._send_unimplemented(server_is_sender=False) + + def test_client_does_not_respond_to_MSG_UNIMPLEMENTED(self): + self._send_unimplemented(server_is_sender=True) + + def _send_client_message(self, message_type): + self.setup_test_server(connect_kwargs={}) + self.ts._send_message = Mock() + # NOTE: this isn't 100% realistic (most of these message types would + # have actual other fields in 'em) but it suffices to test the level of + # message dispatch we're interested in here. + msg = Message() + # TODO: really not liking the whole cMSG_XXX vs MSG_XXX duality right + # now, esp since the former is almost always just byte_chr(the + # latter)...but since that's the case... + msg.add_byte(byte_chr(message_type)) + self.tc._send_message(msg) + # No good way to actually wait for server action (see above tests re: + # MSG_UNIMPLEMENTED). Grump. + time.sleep(0.1) + + def _expect_unimplemented(self): + # Ensure MSG_UNIMPLEMENTED was sent (implies it hit end of loop instead + # of truly handling the given message). + # NOTE: When bug present, this will actually be the first thing that + # fails (since in many cases actual message handling doesn't involve + # sending a message back right away). + assert self.ts._send_message.call_count == 1 + reply = self.ts._send_message.call_args[0][0] + reply.rewind() # Because it's pre-send, not post-receive + assert reply.get_byte() == cMSG_UNIMPLEMENTED + + def test_server_transports_reject_client_message_types(self): + # TODO: handle Transport's own tables too, not just its inner auth + # handler's table. 
See TODOs in auth_handler.py + for message_type in AuthHandler._client_handler_table: + self._send_client_message(message_type) + self._expect_unimplemented() + # Reset for rest of loop + self.tearDown() + self.setUp() + + def test_server_rejects_client_MSG_USERAUTH_SUCCESS(self): + self._send_client_message(MSG_USERAUTH_SUCCESS) + # Sanity checks + assert not self.ts.authenticated + assert not self.ts.auth_handler.authenticated + # Real fix's behavior + self._expect_unimplemented() + + +class AlgorithmDisablingTests(unittest.TestCase): + def test_preferred_lists_default_to_private_attribute_contents(self): + t = Transport(sock=Mock()) + assert t.preferred_ciphers == t._preferred_ciphers + assert t.preferred_macs == t._preferred_macs + assert t.preferred_keys == t._preferred_keys + assert t.preferred_kex == t._preferred_kex + + def test_preferred_lists_filter_disabled_algorithms(self): + t = Transport( + sock=Mock(), + disabled_algorithms={ + "ciphers": ["aes128-cbc"], + "macs": ["hmac-md5"], + "keys": ["ssh-dss"], + "kex": ["diffie-hellman-group14-sha256"], + }, + ) + assert "aes128-cbc" in t._preferred_ciphers + assert "aes128-cbc" not in t.preferred_ciphers + assert "hmac-md5" in t._preferred_macs + assert "hmac-md5" not in t.preferred_macs + assert "ssh-dss" in t._preferred_keys + assert "ssh-dss" not in t.preferred_keys + assert "diffie-hellman-group14-sha256" in t._preferred_kex + assert "diffie-hellman-group14-sha256" not in t.preferred_kex + + def test_implementation_refers_to_public_algo_lists(self): + t = Transport( + sock=Mock(), + disabled_algorithms={ + "ciphers": ["aes128-cbc"], + "macs": ["hmac-md5"], + "keys": ["ssh-dss"], + "kex": ["diffie-hellman-group14-sha256"], + "compression": ["zlib"], + }, + ) + # Enable compression cuz otherwise disabling one option for it makes no + # sense... + t.use_compression(True) + # Effectively a random spot check, but kex init touches most/all of the + # algorithm lists so it's a good spot. + t._send_message = Mock() + t._send_kex_init() + # Cribbed from Transport._parse_kex_init, which didn't feel worth + # refactoring given all the vars involved :( + m = t._send_message.call_args[0][0] + m.rewind() + m.get_byte() # the msg type + m.get_bytes(16) # cookie, discarded + kexen = m.get_list() + server_keys = m.get_list() + ciphers = m.get_list() + m.get_list() + macs = m.get_list() + m.get_list() + compressions = m.get_list() + # OK, now we can actually check that our disabled algos were not + # included (as this message includes the full lists) + assert "aes128-cbc" not in ciphers + assert "hmac-md5" not in macs + assert "ssh-dss" not in server_keys + assert "diffie-hellman-group14-sha256" not in kexen + assert "zlib" not in compressions + + +@contextmanager +def server( + hostkey=None, + init=None, + server_init=None, + client_init=None, + connect=None, + pubkeys=None, + catch_error=False, +): + """ + SSH server contextmanager for testing. + + :param hostkey: + Host key to use for the server; if None, loads + ``test_rsa.key``. + :param init: + Default `Transport` constructor kwargs to use for both sides. + :param server_init: + Extends and/or overrides ``init`` for server transport only. + :param client_init: + Extends and/or overrides ``init`` for client transport only. + :param connect: + Kwargs to use for ``connect()`` on the client. + :param pubkeys: + List of public keys for auth. + :param catch_error: + Whether to capture connection errors & yield from contextmanager. + Necessary for connection_time exception testing. 
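+
+ The context manager yields a ``(tc, ts)`` tuple of the client and server
+ ``Transport`` objects, or ``(tc, ts, err)`` when ``catch_error=True``.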
+ """ + if init is None: + init = {} + if server_init is None: + server_init = {} + if client_init is None: + client_init = {} + if connect is None: + connect = dict(username="slowdive", password="pygmalion") + socks = LoopSocket() + sockc = LoopSocket() + sockc.link(socks) + tc = Transport(sockc, **dict(init, **client_init)) + ts = Transport(socks, **dict(init, **server_init)) + + if hostkey is None: + hostkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + ts.add_server_key(hostkey) + event = threading.Event() + server = NullServer(allowed_keys=pubkeys) + assert not event.is_set() + assert not ts.is_active() + assert tc.get_username() is None + assert ts.get_username() is None + assert not tc.is_authenticated() + assert not ts.is_authenticated() + + err = None + # Trap errors and yield instead of raising right away; otherwise callers + # cannot usefully deal with problems at connect time which stem from errors + # in the server side. + try: + ts.start_server(event, server) + tc.connect(**connect) + + event.wait(1.0) + assert event.is_set() + assert ts.is_active() + assert tc.is_active() + + except Exception as e: + if not catch_error: + raise + err = e + + yield (tc, ts, err) if catch_error else (tc, ts) + + tc.close() + ts.close() + socks.close() + sockc.close() + + +_disable_sha2 = dict( + disabled_algorithms=dict(keys=["rsa-sha2-256", "rsa-sha2-512"]) +) +_disable_sha1 = dict(disabled_algorithms=dict(keys=["ssh-rsa"])) +_disable_sha2_pubkey = dict( + disabled_algorithms=dict(pubkeys=["rsa-sha2-256", "rsa-sha2-512"]) +) +_disable_sha1_pubkey = dict(disabled_algorithms=dict(pubkeys=["ssh-rsa"])) + + +class TestSHA2SignatureKeyExchange(unittest.TestCase): + # NOTE: these all rely on the default server() hostkey being RSA + # NOTE: these rely on both sides being properly implemented re: agreed-upon + # hostkey during kex being what's actually used. Truly proving that eg + # SHA512 was used, is quite difficult w/o super gross hacks. However, there + # are new tests in test_pkey.py which use known signature blobs to prove + # the SHA2 family was in fact used! + + def test_base_case_ssh_rsa_still_used_as_fallback(self): + # Prove that ssh-rsa is used if either, or both, participants have SHA2 + # algorithms disabled + for which in ("init", "client_init", "server_init"): + with server(**{which: _disable_sha2}) as (tc, _): + assert tc.host_key_type == "ssh-rsa" + + def test_kex_with_sha2_512(self): + # It's the default! + with server() as (tc, _): + assert tc.host_key_type == "rsa-sha2-512" + + def test_kex_with_sha2_256(self): + # No 512 -> you get 256 + with server( + init=dict(disabled_algorithms=dict(keys=["rsa-sha2-512"])) + ) as (tc, _): + assert tc.host_key_type == "rsa-sha2-256" + + def _incompatible_peers(self, client_init, server_init): + with server( + client_init=client_init, server_init=server_init, catch_error=True + ) as (tc, ts, err): + # If neither side blew up then that's bad! 
+ assert err is not None + # If client side blew up first, it'll be straightforward + if isinstance(err, IncompatiblePeer): + pass + # If server side blew up first, client sees EOF & we need to check + # the server transport for its saved error (otherwise it can only + # appear in log output) + elif isinstance(err, EOFError): + assert ts.saved_exception is not None + assert isinstance(ts.saved_exception, IncompatiblePeer) + # If it was something else, welp + else: + raise err + + def test_client_sha2_disabled_server_sha1_disabled_no_match(self): + self._incompatible_peers( + client_init=_disable_sha2, server_init=_disable_sha1 + ) + + def test_client_sha1_disabled_server_sha2_disabled_no_match(self): + self._incompatible_peers( + client_init=_disable_sha1, server_init=_disable_sha2 + ) + + def test_explicit_client_hostkey_not_limited(self): + # Be very explicit about the hostkey on BOTH ends, + # and ensure it still ends up choosing sha2-512. + # (This is a regression test vs previous implementation which overwrote + # the entire preferred-hostkeys structure when given an explicit key as + # a client.) + hostkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + with server(hostkey=hostkey, connect=dict(hostkey=hostkey)) as (tc, _): + assert tc.host_key_type == "rsa-sha2-512" + + +class TestExtInfo(unittest.TestCase): + def test_ext_info_handshake(self): + with server() as (tc, _): + kex = tc._get_latest_kex_init() + assert kex["kex_algo_list"][-1] == "ext-info-c" + assert tc.server_extensions == { + "server-sig-algs": b"ssh-ed25519,ecdsa-sha2-nistp256,ecdsa-sha2-nistp384,ecdsa-sha2-nistp521,rsa-sha2-512,rsa-sha2-256,ssh-rsa,ssh-dss" # noqa + } + + def test_client_uses_server_sig_algs_for_pubkey_auth(self): + privkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + with server( + pubkeys=[privkey], + connect=dict(pkey=privkey), + server_init=dict( + disabled_algorithms=dict(pubkeys=["rsa-sha2-512"]) + ), + ) as (tc, _): + assert tc.is_authenticated() + # Client settled on 256 despite itself not having 512 disabled + assert tc._agreed_pubkey_algorithm == "rsa-sha2-256" + + +# TODO: these could move into test_auth.py but that badly needs refactoring +# with this module anyways... 
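+# NOTE: these drive publickey auth with an RSA key while using
+# disabled_algorithms["pubkeys"] to control which of ssh-rsa / rsa-sha2-256 /
+# rsa-sha2-512 each side is willing to use for signatures.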
+class TestSHA2SignaturePubkeys(unittest.TestCase): + def test_pubkey_auth_honors_disabled_algorithms(self): + privkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + with server( + pubkeys=[privkey], + connect=dict(pkey=privkey), + init=dict( + disabled_algorithms=dict( + pubkeys=["ssh-rsa", "rsa-sha2-256", "rsa-sha2-512"] + ) + ), + catch_error=True, + ) as (_, _, err): + assert isinstance(err, SSHException) + assert "no RSA pubkey algorithms" in str(err) + + def test_client_sha2_disabled_server_sha1_disabled_no_match(self): + privkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + with server( + pubkeys=[privkey], + connect=dict(pkey=privkey), + client_init=_disable_sha2_pubkey, + server_init=_disable_sha1_pubkey, + catch_error=True, + ) as (tc, ts, err): + assert isinstance(err, AuthenticationException) + + def test_client_sha1_disabled_server_sha2_disabled_no_match(self): + privkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + with server( + pubkeys=[privkey], + connect=dict(pkey=privkey), + client_init=_disable_sha1_pubkey, + server_init=_disable_sha2_pubkey, + catch_error=True, + ) as (tc, ts, err): + assert isinstance(err, AuthenticationException) + + def test_ssh_rsa_still_used_when_sha2_disabled(self): + privkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + # NOTE: this works because key obj comparison uses public bytes + # TODO: would be nice for PKey to grow a legit "give me another obj of + # same class but just the public bits" using asbytes() + with server( + pubkeys=[privkey], connect=dict(pkey=privkey), init=_disable_sha2 + ) as (tc, _): + assert tc.is_authenticated() + + def test_sha2_512(self): + privkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + with server( + pubkeys=[privkey], + connect=dict(pkey=privkey), + init=dict( + disabled_algorithms=dict(pubkeys=["ssh-rsa", "rsa-sha2-256"]) + ), + ) as (tc, ts): + assert tc.is_authenticated() + assert tc._agreed_pubkey_algorithm == "rsa-sha2-512" + + def test_sha2_256(self): + privkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + with server( + pubkeys=[privkey], + connect=dict(pkey=privkey), + init=dict( + disabled_algorithms=dict(pubkeys=["ssh-rsa", "rsa-sha2-512"]) + ), + ) as (tc, ts): + assert tc.is_authenticated() + assert tc._agreed_pubkey_algorithm == "rsa-sha2-256" + + def test_sha2_256_when_client_only_enables_256(self): + privkey = RSAKey.from_private_key_file(_support("test_rsa.key")) + with server( + pubkeys=[privkey], + connect=dict(pkey=privkey), + # Client-side only; server still accepts all 3. 
+ client_init=dict( + disabled_algorithms=dict(pubkeys=["ssh-rsa", "rsa-sha2-512"]) + ), + ) as (tc, ts): + assert tc.is_authenticated() + assert tc._agreed_pubkey_algorithm == "rsa-sha2-256" diff --git a/tests/test_util.py b/tests/test_util.py index e25f0563e..8ce260d1c 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -26,30 +26,11 @@ from hashlib import sha1 import unittest +import paramiko import paramiko.util -from paramiko.util import lookup_ssh_host_config as host_config, safe_string -from paramiko.py3compat import StringIO, byte_ord, b +from paramiko.util import safe_string +from paramiko.py3compat import byte_ord -# Note some lines in this configuration have trailing spaces on purpose -test_config_file = """\ -Host * - User robey - IdentityFile =~/.ssh/id_rsa - -# comment -Host *.example.com - \tUser bjork -Port=3333 -Host * -""" - -dont_strip_whitespace_please = "\t \t Crazy something dumb " - -test_config_file += dont_strip_whitespace_please -test_config_file += """ -Host spoo.example.com -Crazy something else -""" test_hosts_file = """\ secure.example.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEA1PD6U2/TVxET6lkpKhOk5r\ @@ -61,457 +42,119 @@ """ -# for test 1: -from paramiko import * - - class UtilTest(unittest.TestCase): - def test_1_import(self): + def test_imports(self): """ verify that all the classes can be imported from paramiko. """ - symbols = list(globals().keys()) - self.assertTrue('Transport' in symbols) - self.assertTrue('SSHClient' in symbols) - self.assertTrue('MissingHostKeyPolicy' in symbols) - self.assertTrue('AutoAddPolicy' in symbols) - self.assertTrue('RejectPolicy' in symbols) - self.assertTrue('WarningPolicy' in symbols) - self.assertTrue('SecurityOptions' in symbols) - self.assertTrue('SubsystemHandler' in symbols) - self.assertTrue('Channel' in symbols) - self.assertTrue('RSAKey' in symbols) - self.assertTrue('DSSKey' in symbols) - self.assertTrue('Message' in symbols) - self.assertTrue('SSHException' in symbols) - self.assertTrue('AuthenticationException' in symbols) - self.assertTrue('PasswordRequiredException' in symbols) - self.assertTrue('BadAuthenticationType' in symbols) - self.assertTrue('ChannelException' in symbols) - self.assertTrue('SFTP' in symbols) - self.assertTrue('SFTPFile' in symbols) - self.assertTrue('SFTPHandle' in symbols) - self.assertTrue('SFTPClient' in symbols) - self.assertTrue('SFTPServer' in symbols) - self.assertTrue('SFTPError' in symbols) - self.assertTrue('SFTPAttributes' in symbols) - self.assertTrue('SFTPServerInterface' in symbols) - self.assertTrue('ServerInterface' in symbols) - self.assertTrue('BufferedFile' in symbols) - self.assertTrue('Agent' in symbols) - self.assertTrue('AgentKey' in symbols) - self.assertTrue('HostKeys' in symbols) - self.assertTrue('SSHConfig' in symbols) - self.assertTrue('util' in symbols) - - def test_2_parse_config(self): - global test_config_file - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - self.assertEqual(config._config, - [{'host': ['*'], 'config': {}}, {'host': ['*'], 'config': {'identityfile': ['~/.ssh/id_rsa'], 'user': 'robey'}}, - {'host': ['*.example.com'], 'config': {'user': 'bjork', 'port': '3333'}}, - {'host': ['*'], 'config': {'crazy': 'something dumb'}}, - {'host': ['spoo.example.com'], 'config': {'crazy': 'something else'}}]) - - def test_3_host_config(self): - global test_config_file - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - - for host, values in { - 'irc.danger.com': {'crazy': 'something dumb', - 
'hostname': 'irc.danger.com', - 'user': 'robey'}, - 'irc.example.com': {'crazy': 'something dumb', - 'hostname': 'irc.example.com', - 'user': 'robey', - 'port': '3333'}, - 'spoo.example.com': {'crazy': 'something dumb', - 'hostname': 'spoo.example.com', - 'user': 'robey', - 'port': '3333'} - }.items(): - values = dict(values, - hostname=host, - identityfile=[os.path.expanduser("~/.ssh/id_rsa")] - ) - self.assertEqual( - paramiko.util.lookup_ssh_host_config(host, config), - values - ) + for name in ( + "Agent", + "AgentKey", + "AuthenticationException", + "AutoAddPolicy", + "BadAuthenticationType", + "BufferedFile", + "Channel", + "ChannelException", + "ConfigParseError", + "CouldNotCanonicalize", + "DSSKey", + "HostKeys", + "Message", + "MissingHostKeyPolicy", + "PasswordRequiredException", + "RSAKey", + "RejectPolicy", + "SFTP", + "SFTPAttributes", + "SFTPClient", + "SFTPError", + "SFTPFile", + "SFTPHandle", + "SFTPServer", + "SFTPServerInterface", + "SSHClient", + "SSHConfig", + "SSHConfigDict", + "SSHException", + "SecurityOptions", + "ServerInterface", + "SubsystemHandler", + "Transport", + "WarningPolicy", + "util", + ): + assert name in paramiko.__all__ - def test_4_generate_key_bytes(self): - x = paramiko.util.generate_key_bytes(sha1, b'ABCDEFGH', 'This is my secret passphrase.', 64) - hex = ''.join(['%02x' % byte_ord(c) for c in x]) - self.assertEqual(hex, '9110e2f6793b69363e58173e9436b13a5a4b339005741d5c680e505f57d871347b4239f14fb5c46e857d5e100424873ba849ac699cea98d729e57b3e84378e8b') + def test_generate_key_bytes(self): + x = paramiko.util.generate_key_bytes( + sha1, b"ABCDEFGH", "This is my secret passphrase.", 64 + ) + hex = "".join(["%02x" % byte_ord(c) for c in x]) + hexpected = "9110e2f6793b69363e58173e9436b13a5a4b339005741d5c680e505f57d871347b4239f14fb5c46e857d5e100424873ba849ac699cea98d729e57b3e84378e8b" # noqa + assert hex == hexpected - def test_5_host_keys(self): - with open('hostfile.temp', 'w') as f: + def test_host_keys(self): + with open("hostfile.temp", "w") as f: f.write(test_hosts_file) try: - hostdict = paramiko.util.load_host_keys('hostfile.temp') - self.assertEqual(2, len(hostdict)) - self.assertEqual(1, len(list(hostdict.values())[0])) - self.assertEqual(1, len(list(hostdict.values())[1])) - fp = hexlify(hostdict['secure.example.com']['ssh-rsa'].get_fingerprint()).upper() - self.assertEqual(b'E6684DB30E109B67B70FF1DC5C7F1363', fp) + hostdict = paramiko.util.load_host_keys("hostfile.temp") + assert 2 == len(hostdict) + assert 1 == len(list(hostdict.values())[0]) + assert 1 == len(list(hostdict.values())[1]) + fp = hexlify( + hostdict["secure.example.com"]["ssh-rsa"].get_fingerprint() + ).upper() + assert b"E6684DB30E109B67B70FF1DC5C7F1363" == fp finally: - os.unlink('hostfile.temp') - - def test_7_host_config_expose_issue_33(self): - test_config_file = """ -Host www13.* - Port 22 - -Host *.example.com - Port 2222 - -Host * - Port 3333 - """ - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - host = 'www13.example.com' - self.assertEqual( - paramiko.util.lookup_ssh_host_config(host, config), - {'hostname': host, 'port': '22'} - ) + os.unlink("hostfile.temp") - def test_8_eintr_retry(self): - self.assertEqual('foo', paramiko.util.retry_on_signal(lambda: 'foo')) + def test_eintr_retry(self): + assert "foo" == paramiko.util.retry_on_signal(lambda: "foo") # Variables that are set by raises_intr intr_errors_remaining = [3] call_count = [0] + def raises_intr(): call_count[0] += 1 if intr_errors_remaining[0] > 0: intr_errors_remaining[0] -= 
1 - raise IOError(errno.EINTR, 'file', 'interrupted system call') + raise IOError(errno.EINTR, "file", "interrupted system call") + self.assertTrue(paramiko.util.retry_on_signal(raises_intr) is None) - self.assertEqual(0, intr_errors_remaining[0]) - self.assertEqual(4, call_count[0]) + assert 0 == intr_errors_remaining[0] + assert 4 == call_count[0] def raises_ioerror_not_eintr(): - raise IOError(errno.ENOENT, 'file', 'file not found') - self.assertRaises(IOError, - lambda: paramiko.util.retry_on_signal(raises_ioerror_not_eintr)) - - def raises_other_exception(): - raise AssertionError('foo') - self.assertRaises(AssertionError, - lambda: paramiko.util.retry_on_signal(raises_other_exception)) + raise IOError(errno.ENOENT, "file", "file not found") - def test_9_proxycommand_config_equals_parsing(self): - """ - ProxyCommand should not split on equals signs within the value. - """ - conf = """ -Host space-delimited - ProxyCommand foo bar=biz baz - -Host equals-delimited - ProxyCommand=foo bar=biz baz -""" - f = StringIO(conf) - config = paramiko.util.parse_ssh_config(f) - for host in ('space-delimited', 'equals-delimited'): - self.assertEqual( - host_config(host, config)['proxycommand'], - 'foo bar=biz baz' - ) - - def test_10_proxycommand_interpolation(self): - """ - ProxyCommand should perform interpolation on the value - """ - config = paramiko.util.parse_ssh_config(StringIO(""" -Host specific - Port 37 - ProxyCommand host %h port %p lol - -Host portonly - Port 155 - -Host * - Port 25 - ProxyCommand host %h port %p -""")) - for host, val in ( - ('foo.com', "host foo.com port 25"), - ('specific', "host specific port 37 lol"), - ('portonly', "host portonly port 155"), - ): - self.assertEqual( - host_config(host, config)['proxycommand'], - val - ) - - def test_11_host_config_test_negation(self): - test_config_file = """ -Host www13.* !*.example.com - Port 22 - -Host *.example.com !www13.* - Port 2222 - -Host www13.* - Port 8080 - -Host * - Port 3333 - """ - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - host = 'www13.example.com' - self.assertEqual( - paramiko.util.lookup_ssh_host_config(host, config), - {'hostname': host, 'port': '8080'} + self.assertRaises( + IOError, + lambda: paramiko.util.retry_on_signal(raises_ioerror_not_eintr), ) - def test_12_host_config_test_proxycommand(self): - test_config_file = """ -Host proxy-with-equal-divisor-and-space -ProxyCommand = foo=bar - -Host proxy-with-equal-divisor-and-no-space -ProxyCommand=foo=bar - -Host proxy-without-equal-divisor -ProxyCommand foo=bar:%h-%p - """ - for host, values in { - 'proxy-with-equal-divisor-and-space' :{'hostname': 'proxy-with-equal-divisor-and-space', - 'proxycommand': 'foo=bar'}, - 'proxy-with-equal-divisor-and-no-space':{'hostname': 'proxy-with-equal-divisor-and-no-space', - 'proxycommand': 'foo=bar'}, - 'proxy-without-equal-divisor' :{'hostname': 'proxy-without-equal-divisor', - 'proxycommand': - 'foo=bar:proxy-without-equal-divisor-22'} - }.items(): - - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - self.assertEqual( - paramiko.util.lookup_ssh_host_config(host, config), - values - ) - - def test_11_host_config_test_identityfile(self): - test_config_file = """ - -IdentityFile id_dsa0 - -Host * -IdentityFile id_dsa1 - -Host dsa2 -IdentityFile id_dsa2 - -Host dsa2* -IdentityFile id_dsa22 - """ - for host, values in { - 'foo' :{'hostname': 'foo', - 'identityfile': ['id_dsa0', 'id_dsa1']}, - 'dsa2' :{'hostname': 'dsa2', - 'identityfile': ['id_dsa0', 'id_dsa1', 
'id_dsa2', 'id_dsa22']}, - 'dsa22' :{'hostname': 'dsa22', - 'identityfile': ['id_dsa0', 'id_dsa1', 'id_dsa22']} - }.items(): - - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - self.assertEqual( - paramiko.util.lookup_ssh_host_config(host, config), - values - ) + def raises_other_exception(): + raise AssertionError("foo") - def test_12_config_addressfamily_and_lazy_fqdn(self): - """ - Ensure the code path honoring non-'all' AddressFamily doesn't asplode - """ - test_config = """ -AddressFamily inet -IdentityFile something_%l_using_fqdn -""" - config = paramiko.util.parse_ssh_config(StringIO(test_config)) - assert config.lookup('meh') # will die during lookup() if bug regresses + self.assertRaises( + AssertionError, + lambda: paramiko.util.retry_on_signal(raises_other_exception), + ) def test_clamp_value(self): - self.assertEqual(32768, paramiko.util.clamp_value(32767, 32768, 32769)) - self.assertEqual(32767, paramiko.util.clamp_value(32767, 32765, 32769)) - self.assertEqual(32769, paramiko.util.clamp_value(32767, 32770, 32769)) - - def test_13_config_dos_crlf_succeeds(self): - config_file = StringIO("host abcqwerty\r\nHostName 127.0.0.1\r\n") - config = paramiko.SSHConfig() - config.parse(config_file) - self.assertEqual(config.lookup("abcqwerty")["hostname"], "127.0.0.1") - - def test_14_get_hostnames(self): - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - self.assertEqual(config.get_hostnames(), set(['*', '*.example.com', 'spoo.example.com'])) - - def test_quoted_host_names(self): - test_config_file = """\ -Host "param pam" param "pam" - Port 1111 - -Host "param2" - Port 2222 - -Host param3 parara - Port 3333 - -Host param4 "p a r" "p" "par" para - Port 4444 -""" - res = { - 'param pam': {'hostname': 'param pam', 'port': '1111'}, - 'param': {'hostname': 'param', 'port': '1111'}, - 'pam': {'hostname': 'pam', 'port': '1111'}, - - 'param2': {'hostname': 'param2', 'port': '2222'}, - - 'param3': {'hostname': 'param3', 'port': '3333'}, - 'parara': {'hostname': 'parara', 'port': '3333'}, - - 'param4': {'hostname': 'param4', 'port': '4444'}, - 'p a r': {'hostname': 'p a r', 'port': '4444'}, - 'p': {'hostname': 'p', 'port': '4444'}, - 'par': {'hostname': 'par', 'port': '4444'}, - 'para': {'hostname': 'para', 'port': '4444'}, - } - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - for host, values in res.items(): - self.assertEquals( - paramiko.util.lookup_ssh_host_config(host, config), - values - ) - - def test_quoted_params_in_config(self): - test_config_file = """\ -Host "param pam" param "pam" - IdentityFile id_rsa - -Host "param2" - IdentityFile "test rsa key" - -Host param3 parara - IdentityFile id_rsa - IdentityFile "test rsa key" -""" - res = { - 'param pam': {'hostname': 'param pam', 'identityfile': ['id_rsa']}, - 'param': {'hostname': 'param', 'identityfile': ['id_rsa']}, - 'pam': {'hostname': 'pam', 'identityfile': ['id_rsa']}, - - 'param2': {'hostname': 'param2', 'identityfile': ['test rsa key']}, - - 'param3': {'hostname': 'param3', 'identityfile': ['id_rsa', 'test rsa key']}, - 'parara': {'hostname': 'parara', 'identityfile': ['id_rsa', 'test rsa key']}, - } - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - for host, values in res.items(): - self.assertEquals( - paramiko.util.lookup_ssh_host_config(host, config), - values - ) - - def test_quoted_host_in_config(self): - conf = SSHConfig() - correct_data = { - 'param': ['param'], - '"param"': ['param'], - - 'param pam': 
['param', 'pam'], - '"param" "pam"': ['param', 'pam'], - '"param" pam': ['param', 'pam'], - 'param "pam"': ['param', 'pam'], - - 'param "pam" p': ['param', 'pam', 'p'], - '"param" pam "p"': ['param', 'pam', 'p'], - - '"pa ram"': ['pa ram'], - '"pa ram" pam': ['pa ram', 'pam'], - 'param "p a m"': ['param', 'p a m'], - } - incorrect_data = [ - 'param"', - '"param', - 'param "pam', - 'param "pam" "p a', - ] - for host, values in correct_data.items(): - self.assertEquals( - conf._get_hosts(host), - values - ) - for host in incorrect_data: - self.assertRaises(Exception, conf._get_hosts, host) + assert 32768 == paramiko.util.clamp_value(32767, 32768, 32769) + assert 32767 == paramiko.util.clamp_value(32767, 32765, 32769) + assert 32769 == paramiko.util.clamp_value(32767, 32770, 32769) def test_safe_string(self): - vanilla = b("vanilla") - has_bytes = b("has \7\3 bytes") + vanilla = b"vanilla" + has_bytes = b"has \7\3 bytes" safe_vanilla = safe_string(vanilla) safe_has_bytes = safe_string(has_bytes) - expected_bytes = b("has %07%03 bytes") - err = "{0!r} != {1!r}" - assert safe_vanilla == vanilla, err.format(safe_vanilla, vanilla) - assert safe_has_bytes == expected_bytes, \ - err.format(safe_has_bytes, expected_bytes) - - def test_proxycommand_none_issue_418(self): - test_config_file = """ -Host proxycommand-standard-none - ProxyCommand None - -Host proxycommand-with-equals-none - ProxyCommand=None - """ - for host, values in { - 'proxycommand-standard-none': {'hostname': 'proxycommand-standard-none'}, - 'proxycommand-with-equals-none': {'hostname': 'proxycommand-with-equals-none'} - }.items(): - - f = StringIO(test_config_file) - config = paramiko.util.parse_ssh_config(f) - self.assertEqual( - paramiko.util.lookup_ssh_host_config(host, config), - values - ) - - def test_proxycommand_none_masking(self): - # Re: https://github.com/paramiko/paramiko/issues/670 - source_config = """ -Host specific-host - ProxyCommand none - -Host other-host - ProxyCommand other-proxy - -Host * - ProxyCommand default-proxy -""" - config = paramiko.SSHConfig() - config.parse(StringIO(source_config)) - # When bug is present, the full stripping-out of specific-host's - # ProxyCommand means it actually appears to pick up the default - # ProxyCommand value instead, due to cascading. It should (for - # backwards compatibility reasons in 1.x/2.x) appear completely blank, - # as if the host had no ProxyCommand whatsoever. - # Threw another unrelated host in there just for sanity reasons. 
- self.assertFalse('proxycommand' in config.lookup('specific-host')) - self.assertEqual( - config.lookup('other-host')['proxycommand'], - 'other-proxy' - ) - self.assertEqual( - config.lookup('some-random-host')['proxycommand'], - 'default-proxy' - ) + expected_bytes = b"has %07%03 bytes" + err = "{!r} != {!r}" + msg = err.format(safe_vanilla, vanilla) + assert safe_vanilla == vanilla, msg + msg = err.format(safe_has_bytes, expected_bytes) + assert safe_has_bytes == expected_bytes, msg diff --git a/tests/util.py b/tests/util.py index b546a7e1f..1355ce8a8 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,7 +1,146 @@ +from os.path import dirname, realpath, join import os +import struct +import sys +import unittest -root_path = os.path.dirname(os.path.realpath(__file__)) +import pytest -def test_path(filename): - return os.path.join(root_path, filename) +from paramiko.py3compat import builtins, PY2 +from paramiko.ssh_gss import GSS_AUTH_AVAILABLE + +tests_dir = dirname(realpath(__file__)) + + +def _support(filename): + return join(tests_dir, filename) + + +def _config(name): + return join(tests_dir, "configs", name) + + +needs_gssapi = pytest.mark.skipif( + not GSS_AUTH_AVAILABLE, reason="No GSSAPI to test" +) + + +def needs_builtin(name): + """ + Skip decorated test if builtin name does not exist. + """ + reason = "Test requires a builtin '{}'".format(name) + return pytest.mark.skipif(not hasattr(builtins, name), reason=reason) + + +slow = pytest.mark.slow + +# GSSAPI / Kerberos related tests need a working Kerberos environment. +# The class `KerberosTestCase` provides such an environment or skips all tests. +# There are 3 distinct cases: +# +# - A Kerberos environment has already been created and the environment +# contains the required information. +# +# - We can use the package 'k5test' to setup an working kerberos environment on +# the fly. +# +# - We skip all tests. +# +# ToDo: add a Windows specific implementation? + +if ( + os.environ.get("K5TEST_USER_PRINC", None) + and os.environ.get("K5TEST_HOSTNAME", None) + and os.environ.get("KRB5_KTNAME", None) +): # add other vars as needed + + # The environment provides the required information + class DummyK5Realm(object): + def __init__(self): + for k in os.environ: + if not k.startswith("K5TEST_"): + continue + setattr(self, k[7:].lower(), os.environ[k]) + self.env = {} + + class KerberosTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.realm = DummyK5Realm() + + @classmethod + def tearDownClass(cls): + del cls.realm + + +else: + try: + # Try to setup a kerberos environment + from k5test import KerberosTestCase + except Exception: + # Use a dummy, that skips all tests + class KerberosTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + raise unittest.SkipTest( + "Missing extension package k5test. " + 'Please run "pip install k5test" ' + "to install it." + ) + + +def update_env(testcase, mapping, env=os.environ): + """Modify os.environ during a test case and restore during cleanup.""" + saved_env = env.copy() + + def replace(target, source): + target.update(source) + for k in list(target): + if k not in source: + target.pop(k, None) + + testcase.addCleanup(replace, env, saved_env) + env.update(mapping) + return testcase + + +def k5shell(args=None): + """Create a shell with an kerberos environment + + This can be used to debug paramiko or to test the old GSSAPI. + To test a different GSSAPI, simply activate a suitable venv + within the shell. 
+ """ + import k5test + import atexit + import subprocess + + k5 = k5test.K5Realm() + atexit.register(k5.stop) + os.environ.update(k5.env) + for n in ("realm", "user_princ", "hostname"): + os.environ["K5TEST_" + n.upper()] = getattr(k5, n) + + if not args: + args = sys.argv[1:] + if not args: + args = [os.environ.get("SHELL", "bash")] + sys.exit(subprocess.call(args)) + + +def is_low_entropy(): + """ + Attempts to detect whether running interpreter is low-entropy. + + Classified as being in 32-bit mode under Python 2, or 32-bit mode and with + the hash seed set to zero under Python 3. + """ + is_32bit = struct.calcsize("P") == 32 / 8 + if PY2: + return is_32bit + else: + # I don't see a way to tell internally if the hash seed was set this + # way, but env should be plenty sufficient, this is only for testing. + return is_32bit and os.environ.get("PYTHONHASHSEED", None) == "0" diff --git a/tox-requirements.txt b/tox-requirements.txt deleted file mode 100644 index 9645f854c..000000000 --- a/tox-requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -# Not sure why tox can't just read setup.py? -cryptography >= 1.1 -pyasn1 >= 0.1.7 diff --git a/tox.ini b/tox.ini deleted file mode 100644 index d420c1a3b..000000000 --- a/tox.ini +++ /dev/null @@ -1,6 +0,0 @@ -[tox] -envlist = py26,py27,py33,py34,pypy - -[testenv] -commands = pip install -q -r tox-requirements.txt - python test.py