Commit 0ae4f40d authored by 刘丙寅

Upload all code (全上传代码)

parent 462e9b29
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChangeListManager">
<list default="true" id="7e9981c3-87b3-4c10-977f-3fdfc3f8d2ec" name="Default Changelist" comment="" />
<list default="true" id="7e9981c3-87b3-4c10-977f-3fdfc3f8d2ec" name="Default Changelist" comment="">
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/case/case_zhuanzhen_celue.py" beforeDir="false" afterPath="$PROJECT_DIR$/case/case_zhuanzhen_celue.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/report/result.html" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/report/更美转诊.html" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/INSTALLER" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/LICENSE" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/METADATA" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/RECORD" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/WHEEL" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/entry_points.txt" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet-4.0.0.dist-info/top_level.txt" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/__init__.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/__init__.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/charsetgroupprober.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/charsetgroupprober.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/cli/chardetect.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/cli/chardetect.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/compat.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/compat.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langbulgarianmodel.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langbulgarianmodel.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langgreekmodel.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langgreekmodel.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langhebrewmodel.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langhebrewmodel.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langhungarianmodel.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langhungarianmodel.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langrussianmodel.py" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langthaimodel.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langthaimodel.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langturkishmodel.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/langturkishmodel.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/metadata/__init__.py" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/metadata/languages.py" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/sbcharsetprober.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/sbcharsetprober.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/sbcsgroupprober.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/sbcsgroupprober.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/universaldetector.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/universaldetector.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/version.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/chardet/version.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/INSTALLER" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/LICENSE" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/METADATA" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/RECORD" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/WHEEL" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests-2.25.1.dist-info/top_level.txt" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/__init__.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/__init__.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/__version__.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/__version__.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/models.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/models.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/sessions.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/sessions.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/utils.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/requests/utils.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3-1.26.3.dist-info/INSTALLER" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3-1.26.3.dist-info/LICENSE.txt" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3-1.26.3.dist-info/METADATA" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3-1.26.3.dist-info/RECORD" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3-1.26.3.dist-info/WHEEL" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3-1.26.3.dist-info/top_level.txt" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/__init__.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/__init__.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/_collections.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/_collections.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/_version.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/_version.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/connection.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/connection.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/connectionpool.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/connectionpool.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/bindings.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/bindings.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/low_level.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/_securetransport/low_level.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/appengine.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/appengine.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/ntlmpool.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/ntlmpool.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/pyopenssl.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/securetransport.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/securetransport.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/socks.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/contrib/socks.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/exceptions.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/exceptions.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/fields.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/fields.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/filepost.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/filepost.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/packages/backports/makefile.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/packages/backports/makefile.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/__init__.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/packages/ssl_match_hostname/__init__.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/poolmanager.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/poolmanager.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/request.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/request.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/response.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/response.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/__init__.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/__init__.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/connection.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/connection.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/proxy.py" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/queue.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/queue.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/request.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/request.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/response.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/response.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/retry.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/retry.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/ssl_.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/ssl_.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/ssltransport.py" beforeDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/timeout.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/timeout.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/url.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/url.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/wait.py" beforeDir="false" afterPath="$PROJECT_DIR$/venv/lib/python3.7/site-packages/urllib3/util/wait.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/更美转诊.html" beforeDir="false" />
</list>
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -9,25 +88,25 @@
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="CoverageDataManager">
<SUITE FILE_PATH="coverage/gm_zhuanzhen_test$Unittests_for_case_zhuanzhen_celue_Case_test_01.coverage" NAME="Unittests for case_zhuanzhen_celue.Case.test_01 Coverage Results" MODIFIED="1614826293059" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/case" />
<SUITE FILE_PATH="coverage/gm_zhuanzhen_test$allcase.coverage" NAME="allcase Coverage Results" MODIFIED="1614829622080" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/gm_zhuanzhen_test$Unittests_for_case_zhuanzhen_celue_Case_test_01.coverage" NAME="Unittests for case_zhuanzhen_celue.Case.test_01 Coverage Results" MODIFIED="1614913037277" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/case" />
<SUITE FILE_PATH="coverage/gm_zhuanzhen_test$allcase.coverage" NAME="allcase Coverage Results" MODIFIED="1614928087227" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
</component>
<component name="FileEditorManager">
<leaf>
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/assert_page/assert_method.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="230">
<caret line="10" column="27" selection-start-line="10" selection-start-column="27" selection-end-line="10" selection-end-column="27" />
<state relative-caret-position="138">
<caret line="6" column="30" selection-start-line="6" selection-start-column="30" selection-end-line="6" selection-end-column="30" />
</state>
</provider>
</entry>
</file>
<file pinned="false" current-in-tab="false">
<file pinned="false" current-in-tab="true">
<entry file="file://$PROJECT_DIR$/case/case_zhuanzhen_celue.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="391">
<caret line="17" column="33" lean-forward="true" selection-start-line="17" selection-start-column="33" selection-end-line="17" selection-end-column="33" />
<state relative-caret-position="713">
<caret line="31" column="52" selection-start-line="31" selection-start-column="52" selection-end-line="31" selection-end-column="52" />
<folding>
<element signature="e#0#15#0" expanded="true" />
</folding>
@@ -36,25 +115,12 @@
</entry>
</file>
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/interface/interface.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="138">
<caret line="6" column="24" selection-start-line="6" selection-start-column="24" selection-end-line="6" selection-end-column="24" />
</state>
</provider>
</entry>
</file>
<file pinned="false" current-in-tab="true">
<entry file="file://$PROJECT_DIR$/allcase.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="371">
<caret line="156" column="30" lean-forward="true" selection-start-line="156" selection-start-column="30" selection-end-line="156" selection-end-column="30" />
<state relative-caret-position="368">
<caret line="16" column="3" lean-forward="true" selection-start-line="16" selection-start-column="3" selection-end-line="16" selection-end-column="3" />
<folding>
<element signature="e#15#30#0" expanded="true" />
<element signature="e#632#1095#0" />
<element signature="e#1147#2609#0" />
<element signature="e#1632#1644#0" />
<element signature="e#2650#2917#0" />
<element signature="e#2992#4025#0" />
</folding>
</state>
@@ -70,22 +136,27 @@
</list>
</option>
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
</component>
<component name="IdeDocumentHistory">
<option name="CHANGED_PATHS">
<list>
<option value="$PROJECT_DIR$/assert_page/__init__.py" />
<option value="$PROJECT_DIR$/interface/interface.py" />
<option value="$PROJECT_DIR$/case/case_zhuanzhen_celue.py" />
<option value="$PROJECT_DIR$/venv/lib/python3.7/site-packages/unittestreport/core/testRunner.py" />
<option value="$PROJECT_DIR$/allcase.py" />
<option value="$PROJECT_DIR$/case/case_zhuanzhen_celue.py" />
</list>
</option>
</component>
<component name="ProjectFrameBounds" fullScreen="true">
<option name="x" value="-1920" />
<option name="y" value="-157" />
<option name="width" value="1920" />
<option name="height" value="1057" />
<component name="ProjectFrameBounds" extendedState="6">
<option name="x" value="-1913" />
<option name="y" value="-154" />
<option name="width" value="1905" />
<option name="height" value="1050" />
</component>
<component name="ProjectLevelVcsManager" settingsEditedManually="true" />
<component name="ProjectView">
<navigator proportions="" version="1">
<foldersAlwaysOnTop value="true" />
@@ -148,7 +219,10 @@
</list>
</option>
</component>
<component name="RunManager" selected="Python.allcase">
<component name="RunManager" selected="JavaScript Debug.转诊.html">
<configuration name="转诊.html" type="JavascriptDebugType" temporary="true" nameIsGenerated="true" uri="http://localhost:63342/gm-zhuanzhen-test/report/转诊.html" useBuiltInWebServerPort="true">
<method v="2" />
</configuration>
<configuration name="allcase" type="PythonConfigurationType" factoryName="Python" temporary="true">
<module name="gm-zhuanzhen-test" />
<option name="INTERPRETER_OPTIONS" value="" />
@@ -188,6 +262,7 @@
</configuration>
<recent_temporary>
<list>
<item itemvalue="JavaScript Debug.转诊.html" />
<item itemvalue="Python.allcase" />
<item itemvalue="Python tests.Unittests for case_zhuanzhen_celue.Case.test_01" />
</list>
@@ -203,8 +278,58 @@
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1614825263571</updated>
<workItem from="1614825264853" duration="5170000" />
<workItem from="1614825264853" duration="10642000" />
</task>
<task id="LOCAL-00001" summary="测试git联通">
<created>1614849671013</created>
<option name="number" value="00001" />
<option name="presentableId" value="LOCAL-00001" />
<option name="project" value="LOCAL" />
<updated>1614849671015</updated>
</task>
<task id="LOCAL-00002" summary="测试git测试">
<created>1614851380795</created>
<option name="number" value="00002" />
<option name="presentableId" value="LOCAL-00002" />
<option name="project" value="LOCAL" />
<updated>1614851380795</updated>
</task>
<task id="LOCAL-00003" summary="测试git测试">
<created>1614851970065</created>
<option name="number" value="00003" />
<option name="presentableId" value="LOCAL-00003" />
<option name="project" value="LOCAL" />
<updated>1614851970065</updated>
</task>
<task id="LOCAL-00004" summary="回归正常代码,稳定跑">
<created>1614852139575</created>
<option name="number" value="00004" />
<option name="presentableId" value="LOCAL-00004" />
<option name="project" value="LOCAL" />
<updated>1614852139575</updated>
</task>
<task id="LOCAL-00005" summary="优化代码">
<created>1614913107422</created>
<option name="number" value="00005" />
<option name="presentableId" value="LOCAL-00005" />
<option name="project" value="LOCAL" />
<updated>1614913107422</updated>
</task>
<task id="LOCAL-00006" summary="加机器人">
<created>1614913304295</created>
<option name="number" value="00006" />
<option name="presentableId" value="LOCAL-00006" />
<option name="project" value="LOCAL" />
<updated>1614913304295</updated>
</task>
<task id="LOCAL-00007" summary="代码优化 - 变更报告模式。新增日志输出以及新增失败重跑三次。">
<created>1614928539454</created>
<option name="number" value="00007" />
<option name="presentableId" value="LOCAL-00007" />
<option name="project" value="LOCAL" />
<updated>1614928539454</updated>
</task>
<option name="localTasksCounter" value="8" />
<servers />
</component>
<component name="TestHistory">
@@ -214,63 +339,58 @@
<history-entry file="Unittests_for_case_zhuanzhen_celue_Case_test_01 - 2021.03.04 at 10h 51m 35s.xml">
<configuration name="Unittests for case_zhuanzhen_celue.Case.test_01" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_for_case_zhuanzhen_celue_Case_test_01 - 2021.03.05 at 10h 36m 16s.xml">
<configuration name="Unittests for case_zhuanzhen_celue.Case.test_01" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_for_case_zhuanzhen_celue_Case_test_01 - 2021.03.05 at 10h 37m 38s.xml">
<configuration name="Unittests for case_zhuanzhen_celue.Case.test_01" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_for_case_zhuanzhen_celue_Case_test_01 - 2021.03.05 at 10h 57m 19s.xml">
<configuration name="Unittests for case_zhuanzhen_celue.Case.test_01" configurationId="tests" />
</history-entry>
</component>
<component name="TimeTrackingManager">
<option name="totallyTimeSpent" value="5170000" />
<option name="totallyTimeSpent" value="10642000" />
</component>
<component name="ToolWindowManager">
<frame x="-1920" y="-180" width="1920" height="1080" extended-state="0" />
<frame x="-1920" y="-157" width="1920" height="1057" extended-state="6" />
<editor active="true" />
<layout>
<window_info id="Favorites" side_tool="true" />
<window_info active="true" content_ui="combo" id="Project" order="0" visible="true" weight="0.24973376" />
<window_info id="Structure" order="1" side_tool="true" weight="0.25" />
<window_info anchor="bottom" id="Docker" show_stripe_button="false" />
<window_info anchor="bottom" id="Database Changes" />
<window_info anchor="bottom" id="Version Control" />
<window_info anchor="bottom" id="Python Console" />
<window_info anchor="bottom" id="Terminal" weight="0.32970297" />
<window_info anchor="bottom" id="Event Log" side_tool="true" />
<window_info anchor="bottom" id="Message" order="0" />
<window_info anchor="bottom" id="Find" order="1" weight="0.32970297" />
<window_info anchor="bottom" id="Run" order="2" weight="0.32970297" />
<window_info anchor="bottom" id="Debug" order="3" weight="0.4" />
<window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
<window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
<window_info anchor="bottom" id="TODO" order="6" />
<window_info anchor="right" id="SciView" />
<window_info anchor="right" id="Database" />
<window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
<window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
<window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
</layout>
<layout-to-restore>
<window_info id="Favorites" order="0" side_tool="true" />
<window_info active="true" content_ui="combo" id="Project" order="1" visible="true" weight="0.24973376" />
<window_info active="true" content_ui="combo" id="Project" order="1" visible="true" weight="0.16240682" />
<window_info id="Structure" order="2" side_tool="true" weight="0.25" />
<window_info anchor="bottom" id="Docker" order="0" show_stripe_button="false" />
<window_info anchor="bottom" id="Terminal" order="1" weight="0.32970297" />
<window_info anchor="bottom" id="Database Changes" order="0" />
<window_info anchor="bottom" id="Terminal" order="1" weight="0.47503567" />
<window_info anchor="bottom" id="Event Log" order="2" side_tool="true" />
<window_info anchor="bottom" id="Database Changes" order="3" />
<window_info anchor="bottom" id="Version Control" order="4" />
<window_info anchor="bottom" id="Python Console" order="5" />
<window_info anchor="bottom" id="Version Control" order="3" />
<window_info anchor="bottom" id="Python Console" order="4" />
<window_info anchor="bottom" id="Docker" order="5" show_stripe_button="false" />
<window_info anchor="bottom" id="Message" order="6" />
<window_info anchor="bottom" id="Find" order="7" weight="0.32970297" />
<window_info anchor="bottom" id="Run" order="8" weight="0.32970297" />
<window_info anchor="bottom" id="Debug" order="9" weight="0.4" />
<window_info anchor="bottom" id="Find" order="7" weight="0.32952926" />
<window_info anchor="bottom" id="Run" order="8" weight="0.32952926" />
<window_info anchor="bottom" id="Debug" order="9" weight="0.39942938" />
<window_info anchor="bottom" id="Cvs" order="10" weight="0.25" />
<window_info anchor="bottom" id="Inspection" order="11" weight="0.4" />
<window_info anchor="bottom" id="TODO" order="12" />
<window_info anchor="right" id="Database" order="0" />
<window_info anchor="right" id="SciView" order="1" />
<window_info anchor="right" id="Commander" internal_type="SLIDING" order="2" type="SLIDING" weight="0.4" />
<window_info anchor="right" id="Commander" internal_type="SLIDING" order="1" type="SLIDING" weight="0.4" />
<window_info anchor="right" id="SciView" order="2" />
<window_info anchor="right" id="Ant Build" order="3" weight="0.25" />
<window_info anchor="right" content_ui="combo" id="Hierarchy" order="4" weight="0.25" />
</layout-to-restore>
</layout>
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="1" />
</component>
<component name="VcsManagerConfiguration">
<MESSAGE value="测试git联通" />
<MESSAGE value="测试git测试" />
<MESSAGE value="回归正常代码,稳定跑" />
<MESSAGE value="优化代码" />
<MESSAGE value="加机器人" />
<MESSAGE value="代码优化 - 变更报告模式。新增日志输出以及新增失败重跑三次。" />
<option name="LAST_COMMIT_MESSAGE" value="代码优化 - 变更报告模式。新增日志输出以及新增失败重跑三次。" />
</component>
<component name="editorHistoryManager">
<entry file="file://$PROJECT_DIR$/assert_page/qiniu.py">
<provider selected="true" editor-type-id="text-editor" />
@@ -292,13 +412,6 @@
<entry file="file://$PROJECT_DIR$/report/result.html">
<provider selected="true" editor-type-id="text-editor" />
</entry>
<entry file="file://$PROJECT_DIR$/venv/lib/python3.7/site-packages/BeautifulReport/BeautifulReport.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="487">
<caret line="367" column="34" lean-forward="true" selection-start-line="367" selection-start-column="34" selection-end-line="367" selection-end-column="34" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/interface/__init__.py">
<provider selected="true" editor-type-id="text-editor" />
</entry>
@@ -309,10 +422,27 @@
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/assert_page/assert_method.py">
<entry file="file://$PROJECT_DIR$/转诊.html">
<provider selected="true" editor-type-id="text-editor" />
</entry>
<entry file="file://$PROJECT_DIR$/venv/lib/python3.7/site-packages/unittestreport/core/testRunner.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="258">
<caret line="27" column="32" lean-forward="true" selection-start-line="27" selection-start-column="32" selection-end-line="27" selection-end-column="32" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/venv/lib/python3.7/site-packages/BeautifulReport/BeautifulReport.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="325">
<caret line="347" column="8" selection-start-line="347" selection-start-column="8" selection-end-line="347" selection-end-column="8" />
</state>
</provider>
</entry>
<entry file="file://$APPLICATION_HOME_DIR$/helpers/typeshed/stdlib/3/unittest/__init__.pyi">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="230">
<caret line="10" column="27" selection-start-line="10" selection-start-column="27" selection-end-line="10" selection-end-column="27" />
<state relative-caret-position="346">
<caret line="248" column="14" selection-start-line="248" selection-start-column="14" selection-end-line="248" selection-end-column="14" />
</state>
</provider>
</entry>
@@ -323,27 +453,30 @@
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/case/case_zhuanzhen_celue.py">
<entry file="file://$PROJECT_DIR$/allcase.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="391">
<caret line="17" column="33" lean-forward="true" selection-start-line="17" selection-start-column="33" selection-end-line="17" selection-end-column="33" />
<state relative-caret-position="368">
<caret line="16" column="3" lean-forward="true" selection-start-line="16" selection-start-column="3" selection-end-line="16" selection-end-column="3" />
<folding>
<element signature="e#0#15#0" expanded="true" />
<element signature="e#15#30#0" expanded="true" />
<element signature="e#2992#4025#0" />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/allcase.py">
<entry file="file://$PROJECT_DIR$/assert_page/assert_method.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="138">
<caret line="6" column="30" selection-start-line="6" selection-start-column="30" selection-end-line="6" selection-end-column="30" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/case/case_zhuanzhen_celue.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="371">
<caret line="156" column="30" lean-forward="true" selection-start-line="156" selection-start-column="30" selection-end-line="156" selection-end-column="30" />
<state relative-caret-position="713">
<caret line="31" column="52" selection-start-line="31" selection-start-column="52" selection-end-line="31" selection-end-column="52" />
<folding>
<element signature="e#15#30#0" expanded="true" />
<element signature="e#632#1095#0" />
<element signature="e#1147#2609#0" />
<element signature="e#1632#1644#0" />
<element signature="e#2650#2917#0" />
<element signature="e#2992#4025#0" />
<element signature="e#0#15#0" expanded="true" />
</folding>
</state>
</provider>
......
This source diff could not be displayed because it is too large. You can view the blob instead.
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.
2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.
Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.
4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a
copy of the library already present on the user's computer system,
rather than copying library functions into the executable, and (2)
will operate properly with a modified version of the library, if
the user installs one, as long as the modified version is
interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at
least three years, to give the same user the materials
specified in Subsection 6a, above, for a charge no more
than the cost of performing this distribution.
d) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.
e) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.
b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.
13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).
To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!
Metadata-Version: 2.1
Name: chardet
Version: 4.0.0
Summary: Universal encoding detector for Python 2 and 3
Home-page: https://github.com/chardet/chardet
Author: Mark Pilgrim
Author-email: mark@diveintomark.org
Maintainer: Daniel Blanchard
Maintainer-email: dan.blanchard@gmail.com
License: LGPL
Keywords: encoding,i18n,xml
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Text Processing :: Linguistic
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
Chardet: The Universal Character Encoding Detector
--------------------------------------------------
.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg
:alt: Build status
:target: https://travis-ci.org/chardet/chardet
.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg
:target: https://coveralls.io/r/chardet/chardet
.. image:: https://img.shields.io/pypi/v/chardet.svg
:target: https://warehouse.python.org/project/chardet/
:alt: Latest version on PyPI
.. image:: https://img.shields.io/pypi/l/chardet.svg
:alt: License
Detects
- ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants)
- Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese)
- EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese)
- EUC-KR, ISO-2022-KR (Korean)
- KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic)
- ISO-8859-5, windows-1251 (Bulgarian)
- ISO-8859-1, windows-1252 (Western European languages)
- ISO-8859-7, windows-1253 (Greek)
- ISO-8859-8, windows-1255 (Visual and Logical Hebrew)
- TIS-620 (Thai)
.. note::
Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily
disabled until we can retrain the models.
Requires Python 2.7 or 3.5+.
Installation
------------
Install from `PyPI <https://pypi.org/project/chardet/>`_::
pip install chardet
Documentation
-------------
For users, docs are now available at https://chardet.readthedocs.io/.
Command-line Tool
-----------------
chardet comes with a command-line script which reports on the encodings of one
or more files::
% chardetect somefile someotherfile
somefile: windows-1252 with confidence 0.5
someotherfile: ascii with confidence 1.0
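The same detection is available from the Python API as well; a minimal sketch
(the sample text and the printed result are only illustrative)::

    import chardet

    # detect() returns a dict with the guessed encoding, a confidence value
    # between 0 and 1, and (for single-byte probers) a language name.
    result = chardet.detect(u'Заглавная страница'.encode('utf-8'))
    print(result)  # e.g. {'encoding': 'utf-8', 'confidence': 0.99, 'language': ''}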
About
-----
This is a continuation of Mark Pilgrim's excellent chardet. Previously, two
versions needed to be maintained: one that supported Python 2.x and one that
supported Python 3.x. We've recently merged with `Ian Cordasco <https://github.com/sigmavirus24>`_'s
`charade <https://github.com/sigmavirus24/charade>`_ fork, so now we have one
coherent version that works for Python 2.7+ and 3.4+.
:maintainer: Dan Blanchard
chardet/__init__.py,sha256=mWZaWmvZkhwfBEAT9O1Y6nRTfKzhT7FHhQTTAujbqUA,3271
chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254
chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757
chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411
chardet/charsetgroupprober.py,sha256=GZLReHP6FRRn43hvSOoGCxYamErKzyp6RgOQxVeC3kg,3839
chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110
chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590
chardet/compat.py,sha256=40zr6wICZwknxyuLGGcIOPyve8DTebBCbbvttvnmp5Q,1200
chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855
chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661
chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950
chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510
chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749
chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546
chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748
chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621
chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747
chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715
chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754
chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838
chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777
chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643
chardet/langbulgarianmodel.py,sha256=r6tvOtO8FqhnbWBB5V4czcl1fWM4pB9lGiWQU-8gvsw,105685
chardet/langgreekmodel.py,sha256=1cMu2wUgPB8bQ2RbVjR4LNwCCETgQ-Dwo0Eg2_uB11s,99559
chardet/langhebrewmodel.py,sha256=urMmJHHIXtCwaWAqy1zEY_4SmwwNzt730bDOtjXzRjs,98764
chardet/langhungarianmodel.py,sha256=ODAisvqCfes8B4FeyM_Pg9HY3ZDnEyaCiT4Bxyzoc6w,102486
chardet/langrussianmodel.py,sha256=sPqkrBbX0QVwwy6oqRl-x7ERv2J4-zaMoCvLpkSsSJI,131168
chardet/langthaimodel.py,sha256=ppoKOGL9OPdj9A4CUyG8R48zbnXt9MN1WXeCYepa6sc,103300
chardet/langturkishmodel.py,sha256=H3ldicI_rhlv0r3VFpVWtUL6X30Wy596v7_YHz2sEdg,95934
chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370
chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413
chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012
chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481
chardet/sbcharsetprober.py,sha256=nmyMyuxzG87DN6K3Rk2MUzJLMLR69MrWpdnHzOwVUwQ,6136
chardet/sbcsgroupprober.py,sha256=hqefQuXmiFyDBArOjujH6hd6WFXlOD1kWCsxDhjx5Vc,4309
chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774
chardet/universaldetector.py,sha256=DpZTXCX0nUHXxkQ9sr4GZxGB_hveZ6hWt3uM94cgWKs,12503
chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766
chardet/version.py,sha256=A4CILFAd8MRVG1HoXPp45iK9RLlWyV73a1EtwE8Tvn8,242
chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
chardet/cli/chardetect.py,sha256=kUPeQCi-olObXpOq5MtlKuBn1EU19rkeenAMwxl7URY,2711
chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
chardet/metadata/languages.py,sha256=41tLq3eLSrBEbEVVQpVGFq9K7o1ln9b1HpY1l0hCUQo,19474
chardet-4.0.0.dist-info/LICENSE,sha256=YJXp_6d33SKDn3gBqoRbMcntB_PWv4om3F0t7IzMDvM,26432
chardet-4.0.0.dist-info/METADATA,sha256=ySYQAE7NPm3LwxgMqFi1zdLQ48mmwMbrJwqAWCtcbH8,3526
chardet-4.0.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
chardet-4.0.0.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60
chardet-4.0.0.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8
chardet-4.0.0.dist-info/RECORD,,
../../../bin/chardetect,sha256=v8BhfHjGU-xM32L7E6nVxFgEi--nxIBwp6MYeeUOF0k,285
chardet-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
chardet/__pycache__/mbcharsetprober.cpython-37.pyc,,
chardet/__pycache__/eucjpprober.cpython-37.pyc,,
chardet/__pycache__/langgreekmodel.cpython-37.pyc,,
chardet/__pycache__/sjisprober.cpython-37.pyc,,
chardet/__pycache__/gb2312prober.cpython-37.pyc,,
chardet/__pycache__/euctwfreq.cpython-37.pyc,,
chardet/__pycache__/cp949prober.cpython-37.pyc,,
chardet/__pycache__/version.cpython-37.pyc,,
chardet/__pycache__/langhebrewmodel.cpython-37.pyc,,
chardet/__pycache__/langturkishmodel.cpython-37.pyc,,
chardet/__pycache__/sbcsgroupprober.cpython-37.pyc,,
chardet/__pycache__/euckrfreq.cpython-37.pyc,,
chardet/__pycache__/euckrprober.cpython-37.pyc,,
chardet/__pycache__/escsm.cpython-37.pyc,,
chardet/__pycache__/chardistribution.cpython-37.pyc,,
chardet/__pycache__/euctwprober.cpython-37.pyc,,
chardet/__pycache__/langrussianmodel.cpython-37.pyc,,
chardet/__pycache__/compat.cpython-37.pyc,,
chardet/__pycache__/big5freq.cpython-37.pyc,,
chardet/__pycache__/escprober.cpython-37.pyc,,
chardet/__pycache__/universaldetector.cpython-37.pyc,,
chardet/__pycache__/latin1prober.cpython-37.pyc,,
chardet/__pycache__/hebrewprober.cpython-37.pyc,,
chardet/__pycache__/utf8prober.cpython-37.pyc,,
chardet/__pycache__/langhungarianmodel.cpython-37.pyc,,
chardet/__pycache__/codingstatemachine.cpython-37.pyc,,
chardet/__pycache__/langthaimodel.cpython-37.pyc,,
chardet/__pycache__/enums.cpython-37.pyc,,
chardet/__pycache__/gb2312freq.cpython-37.pyc,,
chardet/__pycache__/big5prober.cpython-37.pyc,,
chardet/__pycache__/mbcssm.cpython-37.pyc,,
chardet/__pycache__/jpcntx.cpython-37.pyc,,
chardet/__pycache__/sbcharsetprober.cpython-37.pyc,,
chardet/__pycache__/jisfreq.cpython-37.pyc,,
chardet/__pycache__/__init__.cpython-37.pyc,,
chardet/__pycache__/charsetprober.cpython-37.pyc,,
chardet/__pycache__/mbcsgroupprober.cpython-37.pyc,,
chardet/__pycache__/langbulgarianmodel.cpython-37.pyc,,
chardet/__pycache__/charsetgroupprober.cpython-37.pyc,,
chardet/cli/__pycache__/chardetect.cpython-37.pyc,,
chardet/cli/__pycache__/__init__.cpython-37.pyc,,
chardet/metadata/__pycache__/__init__.cpython-37.pyc,,
chardet/metadata/__pycache__/languages.cpython-37.pyc,,
Wheel-Version: 1.0
Generator: bdist_wheel (0.35.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
[console_scripts]
chardetect = chardet.cli.chardetect:main
......@@ -16,14 +16,11 @@
######################### END LICENSE BLOCK #########################
from .compat import PY2, PY3
from .universaldetector import UniversalDetector
from .enums import InputState
from .version import __version__, VERSION
__all__ = ['UniversalDetector', 'detect', 'detect_all', '__version__', 'VERSION']
def detect(byte_str):
"""
Detect the encoding of the given byte string.
......@@ -34,50 +31,9 @@ def detect(byte_str):
if not isinstance(byte_str, bytearray):
if not isinstance(byte_str, bytes):
raise TypeError('Expected object of type bytes or bytearray, got: '
'{}'.format(type(byte_str)))
'{0}'.format(type(byte_str)))
else:
byte_str = bytearray(byte_str)
detector = UniversalDetector()
detector.feed(byte_str)
return detector.close()
def detect_all(byte_str):
"""
Detect all the possible encodings of the given byte string.
:param byte_str: The byte sequence to examine.
:type byte_str: ``bytes`` or ``bytearray``
"""
if not isinstance(byte_str, bytearray):
if not isinstance(byte_str, bytes):
raise TypeError('Expected object of type bytes or bytearray, got: '
'{}'.format(type(byte_str)))
else:
byte_str = bytearray(byte_str)
detector = UniversalDetector()
detector.feed(byte_str)
detector.close()
if detector._input_state == InputState.HIGH_BYTE:
results = []
for prober in detector._charset_probers:
if prober.get_confidence() > detector.MINIMUM_THRESHOLD:
charset_name = prober.charset_name
lower_charset_name = prober.charset_name.lower()
# Use Windows encoding name instead of ISO-8859 if we saw any
# extra Windows-specific bytes
if lower_charset_name.startswith('iso-8859'):
if detector._has_win_bytes:
charset_name = detector.ISO_WIN_MAP.get(lower_charset_name,
charset_name)
results.append({
'encoding': charset_name,
'confidence': prober.get_confidence(),
'language': prober.language,
})
if len(results) > 0:
return sorted(results, key=lambda result: -result['confidence'])
return [detector.result]
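For reference, ``detect_all`` (new in the chardet 4.0.0 code shown above, not
present in the 3.0.x line) returns every prober that cleared the minimum
threshold, ordered by confidence; a small usage sketch with illustrative input::

    import chardet

    data = u'Начална страница'.encode('windows-1251')
    # Each candidate is a dict with 'encoding', 'confidence' and 'language',
    # sorted from most to least confident.
    for candidate in chardet.detect_all(data):
        print(candidate['encoding'], candidate['confidence'], candidate['language'])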
......@@ -73,7 +73,6 @@ class CharSetGroupProber(CharSetProber):
continue
if state == ProbingState.FOUND_IT:
self._best_guess_prober = prober
self._state = ProbingState.FOUND_IT
return self.state
elif state == ProbingState.NOT_ME:
prober.active = False
......
#!/usr/bin/env python
"""
Script which takes one or more file paths and reports on their detected
encodings
......@@ -44,10 +45,10 @@ def description_of(lines, name='stdin'):
if PY2:
name = name.decode(sys.getfilesystemencoding(), 'ignore')
if result['encoding']:
return '{}: {} with confidence {}'.format(name, result['encoding'],
return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
result['confidence'])
else:
return '{}: no result'.format(name)
return '{0}: no result'.format(name)
def main(argv=None):
......@@ -68,7 +69,7 @@ def main(argv=None):
type=argparse.FileType('rb'), nargs='*',
default=[sys.stdin if PY2 else sys.stdin.buffer])
parser.add_argument('--version', action='version',
version='%(prog)s {}'.format(__version__))
version='%(prog)s {0}'.format(__version__))
args = parser.parse_args(argv)
for f in args.input:
......
......@@ -25,12 +25,10 @@ import sys
if sys.version_info < (3, 0):
PY2 = True
PY3 = False
string_types = (str, unicode)
base_str = (str, unicode)
text_type = unicode
iteritems = dict.iteritems
else:
PY2 = False
PY3 = True
string_types = (bytes, str)
base_str = (bytes, str)
text_type = str
iteritems = dict.items
This source diff could not be displayed because it is too large. You can view the blob instead.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Metadata about languages used by our model training code for our
SingleByteCharSetProbers. Could be used for other things in the future.
This code is based on the language metadata from the uchardet project.
"""
from __future__ import absolute_import, print_function
from string import ascii_letters
# TODO: Add Ukrainian (KOI8-U)
class Language(object):
"""Metadata about a language useful for training models
:ivar name: The human name for the language, in English.
:type name: str
:ivar iso_code: 2-letter ISO 639-1 if possible, 3-letter ISO code otherwise,
or use another catalog as a last resort.
:type iso_code: str
:ivar use_ascii: Whether or not ASCII letters should be included in trained
models.
:type use_ascii: bool
:ivar charsets: The charsets we want to support and create data for.
:type charsets: list of str
:ivar alphabet: The characters in the language's alphabet. If `use_ascii` is
`True`, you only need to add those not in the ASCII set.
:type alphabet: str
:ivar wiki_start_pages: The Wikipedia pages to start from if we're crawling
Wikipedia for training data.
:type wiki_start_pages: list of str
"""
def __init__(self, name=None, iso_code=None, use_ascii=True, charsets=None,
alphabet=None, wiki_start_pages=None):
super(Language, self).__init__()
self.name = name
self.iso_code = iso_code
self.use_ascii = use_ascii
self.charsets = charsets
if self.use_ascii:
if alphabet:
alphabet += ascii_letters
else:
alphabet = ascii_letters
elif not alphabet:
raise ValueError('Must supply alphabet if use_ascii is False')
self.alphabet = ''.join(sorted(set(alphabet))) if alphabet else None
self.wiki_start_pages = wiki_start_pages
def __repr__(self):
return '{}({})'.format(self.__class__.__name__,
', '.join('{}={!r}'.format(k, v)
for k, v in self.__dict__.items()
if not k.startswith('_')))
LANGUAGES = {'Arabic': Language(name='Arabic',
iso_code='ar',
use_ascii=False,
# We only support encodings that use isolated
# forms, because the current recommendation is
# that the rendering system handles presentation
# forms. This means we purposefully skip IBM864.
charsets=['ISO-8859-6', 'WINDOWS-1256',
'CP720', 'CP864'],
alphabet=u'ءآأؤإئابةتثجحخدذرزسشصضطظعغػؼؽؾؿـفقكلمنهوىيًٌٍَُِّ',
wiki_start_pages=[u'الصفحة_الرئيسية']),
'Belarusian': Language(name='Belarusian',
iso_code='be',
use_ascii=False,
charsets=['ISO-8859-5', 'WINDOWS-1251',
'IBM866', 'MacCyrillic'],
alphabet=(u'АБВГДЕЁЖЗІЙКЛМНОПРСТУЎФХЦЧШЫЬЭЮЯ'
u'абвгдеёжзійклмнопрстуўфхцчшыьэюяʼ'),
wiki_start_pages=[u'Галоўная_старонка']),
'Bulgarian': Language(name='Bulgarian',
iso_code='bg',
use_ascii=False,
charsets=['ISO-8859-5', 'WINDOWS-1251',
'IBM855'],
alphabet=(u'АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЬЮЯ'
u'абвгдежзийклмнопрстуфхцчшщъьюя'),
wiki_start_pages=[u'Начална_страница']),
'Czech': Language(name='Czech',
iso_code='cz',
use_ascii=True,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=u'áčďéěíňóřšťúůýžÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ',
wiki_start_pages=[u'Hlavní_strana']),
'Danish': Language(name='Danish',
iso_code='da',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'æøåÆØÅ',
wiki_start_pages=[u'Forside']),
'German': Language(name='German',
iso_code='de',
use_ascii=True,
charsets=['ISO-8859-1', 'WINDOWS-1252'],
alphabet=u'äöüßÄÖÜ',
wiki_start_pages=[u'Wikipedia:Hauptseite']),
'Greek': Language(name='Greek',
iso_code='el',
use_ascii=False,
charsets=['ISO-8859-7', 'WINDOWS-1253'],
alphabet=(u'αβγδεζηθικλμνξοπρσςτυφχψωάέήίόύώ'
u'ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΣΤΥΦΧΨΩΆΈΉΊΌΎΏ'),
wiki_start_pages=[u'Πύλη:Κύρια']),
'English': Language(name='English',
iso_code='en',
use_ascii=True,
charsets=['ISO-8859-1', 'WINDOWS-1252'],
wiki_start_pages=[u'Main_Page']),
'Esperanto': Language(name='Esperanto',
iso_code='eo',
# Q, W, X, and Y not used at all
use_ascii=False,
charsets=['ISO-8859-3'],
alphabet=(u'abcĉdefgĝhĥijĵklmnoprsŝtuŭvz'
u'ABCĈDEFGĜHĤIJĴKLMNOPRSŜTUŬVZ'),
wiki_start_pages=[u'Vikipedio:Ĉefpaĝo']),
'Spanish': Language(name='Spanish',
iso_code='es',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'ñáéíóúüÑÁÉÍÓÚÜ',
wiki_start_pages=[u'Wikipedia:Portada']),
'Estonian': Language(name='Estonian',
iso_code='et',
use_ascii=False,
charsets=['ISO-8859-4', 'ISO-8859-13',
'WINDOWS-1257'],
# C, F, Š, Q, W, X, Y, Z, Ž are only for
# loanwords
alphabet=(u'ABDEGHIJKLMNOPRSTUVÕÄÖÜ'
u'abdeghijklmnoprstuvõäöü'),
wiki_start_pages=[u'Esileht']),
'Finnish': Language(name='Finnish',
iso_code='fi',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'ÅÄÖŠŽåäöšž',
wiki_start_pages=[u'Wikipedia:Etusivu']),
'French': Language(name='French',
iso_code='fr',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ',
wiki_start_pages=[u'Wikipédia:Accueil_principal',
u'Bœuf (animal)']),
'Hebrew': Language(name='Hebrew',
iso_code='he',
use_ascii=False,
charsets=['ISO-8859-8', 'WINDOWS-1255'],
alphabet=u'אבגדהוזחטיךכלםמןנסעףפץצקרשתװױײ',
wiki_start_pages=[u'עמוד_ראשי']),
'Croatian': Language(name='Croatian',
iso_code='hr',
# Q, W, X, Y are only used for foreign words.
use_ascii=False,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=(u'abcčćdđefghijklmnoprsštuvzž'
u'ABCČĆDĐEFGHIJKLMNOPRSŠTUVZŽ'),
wiki_start_pages=[u'Glavna_stranica']),
'Hungarian': Language(name='Hungarian',
iso_code='hu',
# Q, W, X, Y are only used for foreign words.
use_ascii=False,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=(u'abcdefghijklmnoprstuvzáéíóöőúüű'
u'ABCDEFGHIJKLMNOPRSTUVZÁÉÍÓÖŐÚÜŰ'),
wiki_start_pages=[u'Kezdőlap']),
'Italian': Language(name='Italian',
iso_code='it',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'ÀÈÉÌÒÓÙàèéìòóù',
wiki_start_pages=[u'Pagina_principale']),
'Lithuanian': Language(name='Lithuanian',
iso_code='lt',
use_ascii=False,
charsets=['ISO-8859-13', 'WINDOWS-1257',
'ISO-8859-4'],
# Q, W, and X not used at all
alphabet=(u'AĄBCČDEĘĖFGHIĮYJKLMNOPRSŠTUŲŪVZŽ'
u'aąbcčdeęėfghiįyjklmnoprsštuųūvzž'),
wiki_start_pages=[u'Pagrindinis_puslapis']),
'Latvian': Language(name='Latvian',
iso_code='lv',
use_ascii=False,
charsets=['ISO-8859-13', 'WINDOWS-1257',
'ISO-8859-4'],
# Q, W, X, Y are only for loanwords
alphabet=(u'AĀBCČDEĒFGĢHIĪJKĶLĻMNŅOPRSŠTUŪVZŽ'
u'aābcčdeēfgģhiījkķlļmnņoprsštuūvzž'),
wiki_start_pages=[u'Sākumlapa']),
'Macedonian': Language(name='Macedonian',
iso_code='mk',
use_ascii=False,
charsets=['ISO-8859-5', 'WINDOWS-1251',
'MacCyrillic', 'IBM855'],
alphabet=(u'АБВГДЃЕЖЗЅИЈКЛЉМНЊОПРСТЌУФХЦЧЏШ'
u'абвгдѓежзѕијклљмнњопрстќуфхцчџш'),
wiki_start_pages=[u'Главна_страница']),
'Dutch': Language(name='Dutch',
iso_code='nl',
use_ascii=True,
charsets=['ISO-8859-1', 'WINDOWS-1252'],
wiki_start_pages=[u'Hoofdpagina']),
'Polish': Language(name='Polish',
iso_code='pl',
# Q and X are only used for foreign words.
use_ascii=False,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=(u'AĄBCĆDEĘFGHIJKLŁMNŃOÓPRSŚTUWYZŹŻ'
u'aąbcćdeęfghijklłmnńoóprsśtuwyzźż'),
wiki_start_pages=[u'Wikipedia:Strona_główna']),
'Portuguese': Language(name='Portuguese',
iso_code='pt',
use_ascii=True,
charsets=['ISO-8859-1', 'ISO-8859-15',
'WINDOWS-1252'],
alphabet=u'ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú',
wiki_start_pages=[u'Wikipédia:Página_principal']),
'Romanian': Language(name='Romanian',
iso_code='ro',
use_ascii=True,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=u'ăâîșțĂÂÎȘȚ',
wiki_start_pages=[u'Pagina_principală']),
'Russian': Language(name='Russian',
iso_code='ru',
use_ascii=False,
charsets=['ISO-8859-5', 'WINDOWS-1251',
'KOI8-R', 'MacCyrillic', 'IBM866',
'IBM855'],
alphabet=(u'абвгдеёжзийклмнопрстуфхцчшщъыьэюя'
u'АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ'),
wiki_start_pages=[u'Заглавная_страница']),
'Slovak': Language(name='Slovak',
iso_code='sk',
use_ascii=True,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=u'áäčďéíĺľňóôŕšťúýžÁÄČĎÉÍĹĽŇÓÔŔŠŤÚÝŽ',
wiki_start_pages=[u'Hlavná_stránka']),
'Slovene': Language(name='Slovene',
iso_code='sl',
# Q, W, X, Y are only used for foreign words.
use_ascii=False,
charsets=['ISO-8859-2', 'WINDOWS-1250'],
alphabet=(u'abcčdefghijklmnoprsštuvzž'
u'ABCČDEFGHIJKLMNOPRSŠTUVZŽ'),
wiki_start_pages=[u'Glavna_stran']),
# Serbian can be written in both Latin and Cyrillic, but there's no
# simple way to get the Latin alphabet pages from Wikipedia through
# the API, so for now we just support Cyrillic.
'Serbian': Language(name='Serbian',
iso_code='sr',
alphabet=(u'АБВГДЂЕЖЗИЈКЛЉМНЊОПРСТЋУФХЦЧЏШ'
u'абвгдђежзијклљмнњопрстћуфхцчџш'),
charsets=['ISO-8859-5', 'WINDOWS-1251',
'MacCyrillic', 'IBM855'],
wiki_start_pages=[u'Главна_страна']),
'Thai': Language(name='Thai',
iso_code='th',
use_ascii=False,
charsets=['ISO-8859-11', 'TIS-620', 'CP874'],
alphabet=u'กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛',
wiki_start_pages=[u'หน้าหลัก']),
'Turkish': Language(name='Turkish',
iso_code='tr',
# Q, W, and X are not used by Turkish
use_ascii=False,
charsets=['ISO-8859-3', 'ISO-8859-9',
'WINDOWS-1254'],
alphabet=(u'abcçdefgğhıijklmnoöprsştuüvyzâîû'
u'ABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZÂÎÛ'),
wiki_start_pages=[u'Ana_Sayfa']),
'Vietnamese': Language(name='Vietnamese',
iso_code='vi',
use_ascii=False,
# Windows-1258 is the only common 8-bit
# Vietnamese encoding supported by Python.
# From Wikipedia:
# For systems that lack support for Unicode,
# dozens of 8-bit Vietnamese code pages are
# available.[1] The most common are VISCII
# (TCVN 5712:1993), VPS, and Windows-1258.[3]
# Where ASCII is required, such as when
# ensuring readability in plain text e-mail,
# Vietnamese letters are often encoded
# according to Vietnamese Quoted-Readable
# (VIQR) or VSCII Mnemonic (VSCII-MNEM),[4]
# though usage of either variable-width
# scheme has declined dramatically following
# the adoption of Unicode on the World Wide
# Web.
charsets=['WINDOWS-1258'],
alphabet=(u'aăâbcdđeêghiklmnoôơpqrstuưvxy'
u'AĂÂBCDĐEÊGHIKLMNOÔƠPQRSTUƯVXY'),
wiki_start_pages=[u'Chữ_Quốc_ngữ']),
}
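As a quick illustration of how ``use_ascii`` interacts with ``alphabet`` in the
class above (a sketch assuming the chardet 4.0.0 layout, not part of the shipped
module)::

    from string import ascii_letters
    from chardet.metadata.languages import LANGUAGES, Language

    # With use_ascii=True, ASCII letters are folded into the stored alphabet.
    danish = LANGUAGES['Danish']
    assert set(ascii_letters) <= set(danish.alphabet)
    assert u'æ' in danish.alphabet

    # With use_ascii=False, an explicit alphabet is required.
    try:
        Language(name='Example', iso_code='xx', use_ascii=False, charsets=['ASCII'])
    except ValueError as error:
        print(error)  # Must supply alphabet if use_ascii is False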
......@@ -26,22 +26,10 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from collections import namedtuple
from .charsetprober import CharSetProber
from .enums import CharacterCategory, ProbingState, SequenceLikelihood
SingleByteCharSetModel = namedtuple('SingleByteCharSetModel',
['charset_name',
'language',
'char_to_order_map',
'language_model',
'typical_positive_ratio',
'keep_ascii_letters',
'alphabet'])
class SingleByteCharSetProber(CharSetProber):
SAMPLE_SIZE = 64
SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2
......@@ -77,25 +65,25 @@ class SingleByteCharSetProber(CharSetProber):
if self._name_prober:
return self._name_prober.charset_name
else:
return self._model.charset_name
return self._model['charset_name']
@property
def language(self):
if self._name_prober:
return self._name_prober.language
else:
return self._model.language
return self._model.get('language')
def feed(self, byte_str):
# TODO: Make filter_international_words keep things in self.alphabet
if not self._model.keep_ascii_letters:
if not self._model['keep_english_letter']:
byte_str = self.filter_international_words(byte_str)
if not byte_str:
return self.state
char_to_order_map = self._model.char_to_order_map
language_model = self._model.language_model
for char in byte_str:
order = char_to_order_map.get(char, CharacterCategory.UNDEFINED)
char_to_order_map = self._model['char_to_order_map']
for i, c in enumerate(byte_str):
# XXX: Order is in range 1-64, so one would think we want 0-63 here,
# but that leads to 27 more test failures than before.
order = char_to_order_map[c]
# XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
# CharacterCategory.SYMBOL is actually 253, so we use CONTROL
# to make it closer to the original intent. The only difference
......@@ -103,21 +91,20 @@ class SingleByteCharSetProber(CharSetProber):
# _total_char purposes.
if order < CharacterCategory.CONTROL:
self._total_char += 1
# TODO: Follow uchardet's lead and discount confidence for frequent
# control characters.
# See https://github.com/BYVoid/uchardet/commit/55b4f23971db61
if order < self.SAMPLE_SIZE:
self._freq_char += 1
if self._last_order < self.SAMPLE_SIZE:
self._total_seqs += 1
if not self._reversed:
lm_cat = language_model[self._last_order][order]
else:
lm_cat = language_model[order][self._last_order]
self._seq_counters[lm_cat] += 1
i = (self._last_order * self.SAMPLE_SIZE) + order
model = self._model['precedence_matrix'][i]
else: # reverse the order of the letters in the lookup
i = (order * self.SAMPLE_SIZE) + self._last_order
model = self._model['precedence_matrix'][i]
self._seq_counters[model] += 1
self._last_order = order
charset_name = self._model.charset_name
charset_name = self._model['charset_name']
if self.state == ProbingState.DETECTING:
if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD:
confidence = self.get_confidence()
......@@ -138,7 +125,7 @@ class SingleByteCharSetProber(CharSetProber):
r = 0.01
if self._total_seqs > 0:
r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) /
self._total_seqs / self._model.typical_positive_ratio)
self._total_seqs / self._model['typical_positive_ratio'])
r = r * self._freq_char / self._total_char
if r >= 1.0:
r = 0.99
......
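The confidence computed in ``get_confidence`` above boils down to two ratios
multiplied together and clipped at 0.99; a standalone sketch with made-up
counter values::

    # Hypothetical counters of the kind the prober accumulates while feeding bytes.
    positive_seqs = 820            # sequences the language model rates as likely
    total_seqs = 1100              # all sequences observed
    typical_positive_ratio = 0.87  # per-model constant (value here is illustrative)
    freq_char = 900                # characters among the model's 64 most frequent
    total_char = 1000              # all counted characters

    r = (1.0 * positive_seqs) / total_seqs / typical_positive_ratio
    r = r * freq_char / total_char
    confidence = min(r, 0.99)
    print(round(confidence, 3))    # roughly 0.77 for these numbers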
......@@ -27,57 +27,47 @@
######################### END LICENSE BLOCK #########################
from .charsetgroupprober import CharSetGroupProber
from .hebrewprober import HebrewProber
from .langbulgarianmodel import (ISO_8859_5_BULGARIAN_MODEL,
WINDOWS_1251_BULGARIAN_MODEL)
from .langgreekmodel import ISO_8859_7_GREEK_MODEL, WINDOWS_1253_GREEK_MODEL
from .langhebrewmodel import WINDOWS_1255_HEBREW_MODEL
# from .langhungarianmodel import (ISO_8859_2_HUNGARIAN_MODEL,
# WINDOWS_1250_HUNGARIAN_MODEL)
from .langrussianmodel import (IBM855_RUSSIAN_MODEL, IBM866_RUSSIAN_MODEL,
ISO_8859_5_RUSSIAN_MODEL, KOI8_R_RUSSIAN_MODEL,
MACCYRILLIC_RUSSIAN_MODEL,
WINDOWS_1251_RUSSIAN_MODEL)
from .langthaimodel import TIS_620_THAI_MODEL
from .langturkishmodel import ISO_8859_9_TURKISH_MODEL
from .sbcharsetprober import SingleByteCharSetProber
from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
Latin5CyrillicModel, MacCyrillicModel,
Ibm866Model, Ibm855Model)
from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
from .langthaimodel import TIS620ThaiModel
from .langhebrewmodel import Win1255HebrewModel
from .hebrewprober import HebrewProber
from .langturkishmodel import Latin5TurkishModel
class SBCSGroupProber(CharSetGroupProber):
def __init__(self):
super(SBCSGroupProber, self).__init__()
hebrew_prober = HebrewProber()
logical_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL,
False, hebrew_prober)
# TODO: See if using ISO-8859-8 Hebrew model works better here, since
# it's actually the visual one
visual_hebrew_prober = SingleByteCharSetProber(WINDOWS_1255_HEBREW_MODEL,
True, hebrew_prober)
hebrew_prober.set_model_probers(logical_hebrew_prober,
visual_hebrew_prober)
# TODO: ORDER MATTERS HERE. I changed the order vs what was in master
# and several tests failed that did not before. Some thought
# should be put into the ordering, and we should consider making
# order not matter here, because that is very counter-intuitive.
self.probers = [
SingleByteCharSetProber(WINDOWS_1251_RUSSIAN_MODEL),
SingleByteCharSetProber(KOI8_R_RUSSIAN_MODEL),
SingleByteCharSetProber(ISO_8859_5_RUSSIAN_MODEL),
SingleByteCharSetProber(MACCYRILLIC_RUSSIAN_MODEL),
SingleByteCharSetProber(IBM866_RUSSIAN_MODEL),
SingleByteCharSetProber(IBM855_RUSSIAN_MODEL),
SingleByteCharSetProber(ISO_8859_7_GREEK_MODEL),
SingleByteCharSetProber(WINDOWS_1253_GREEK_MODEL),
SingleByteCharSetProber(ISO_8859_5_BULGARIAN_MODEL),
SingleByteCharSetProber(WINDOWS_1251_BULGARIAN_MODEL),
SingleByteCharSetProber(Win1251CyrillicModel),
SingleByteCharSetProber(Koi8rModel),
SingleByteCharSetProber(Latin5CyrillicModel),
SingleByteCharSetProber(MacCyrillicModel),
SingleByteCharSetProber(Ibm866Model),
SingleByteCharSetProber(Ibm855Model),
SingleByteCharSetProber(Latin7GreekModel),
SingleByteCharSetProber(Win1253GreekModel),
SingleByteCharSetProber(Latin5BulgarianModel),
SingleByteCharSetProber(Win1251BulgarianModel),
# TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
# after we retrain model.
# SingleByteCharSetProber(ISO_8859_2_HUNGARIAN_MODEL),
# SingleByteCharSetProber(WINDOWS_1250_HUNGARIAN_MODEL),
SingleByteCharSetProber(TIS_620_THAI_MODEL),
SingleByteCharSetProber(ISO_8859_9_TURKISH_MODEL),
hebrew_prober,
logical_hebrew_prober,
visual_hebrew_prober,
# SingleByteCharSetProber(Latin2HungarianModel),
# SingleByteCharSetProber(Win1250HungarianModel),
SingleByteCharSetProber(TIS620ThaiModel),
SingleByteCharSetProber(Latin5TurkishModel),
]
hebrew_prober = HebrewProber()
logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel,
False, hebrew_prober)
visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True,
hebrew_prober)
hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober)
self.probers.extend([hebrew_prober, logical_hebrew_prober,
visual_hebrew_prober])
self.reset()
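A minimal sketch of exercising the group prober directly (normally
``UniversalDetector`` drives it; the printed charset and confidence are whatever
the probers decide for this illustrative input, not guaranteed values)::

    from chardet.sbcsgroupprober import SBCSGroupProber

    prober = SBCSGroupProber()
    prober.feed(u'Заглавная страница Википедии'.encode('windows-1251'))
    # charset_name and get_confidence() reflect the best single-byte prober so far.
    print(prober.charset_name, prober.get_confidence())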
......@@ -266,7 +266,7 @@ class UniversalDetector(object):
'language': max_prober.language}
# Log all prober confidences if none met MINIMUM_THRESHOLD
if self.logger.getEffectiveLevel() <= logging.DEBUG:
if self.logger.getEffectiveLevel() == logging.DEBUG:
if self.result['encoding'] is None:
self.logger.debug('no probers hit minimum threshold')
for group_prober in self._charset_probers:
......@@ -280,7 +280,7 @@ class UniversalDetector(object):
prober.get_confidence())
else:
self.logger.debug('%s %s confidence = %s',
group_prober.charset_name,
group_prober.language,
group_prober.get_confidence())
prober.charset_name,
prober.language,
prober.get_confidence())
return self.result
......@@ -5,5 +5,5 @@ from within setup.py and from chardet subpackages.
:author: Dan Blanchard (dan.blanchard@gmail.com)
"""
__version__ = "4.0.0"
__version__ = "3.0.4"
VERSION = __version__.split('.')
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
Metadata-Version: 2.1
Name: requests
Version: 2.25.1
Summary: Python HTTP for Humans.
Home-page: https://requests.readthedocs.io
Author: Kenneth Reitz
Author-email: me@kennethreitz.org
License: Apache 2.0
Project-URL: Documentation, https://requests.readthedocs.io
Project-URL: Source, https://github.com/psf/requests
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*
Description-Content-Type: text/markdown
Requires-Dist: chardet (<5,>=3.0.2)
Requires-Dist: idna (<3,>=2.5)
Requires-Dist: urllib3 (<1.27,>=1.21.1)
Requires-Dist: certifi (>=2017.4.17)
Provides-Extra: security
Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'security'
Requires-Dist: cryptography (>=1.3.4) ; extra == 'security'
Provides-Extra: socks
Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks'
Requires-Dist: win-inet-pton ; (sys_platform == "win32" and python_version == "2.7") and extra == 'socks'
# Requests
**Requests** is a simple, yet elegant HTTP library.
```python
>>> import requests
>>> r = requests.get('https://api.github.com/user', auth=('user', 'pass'))
>>> r.status_code
200
>>> r.headers['content-type']
'application/json; charset=utf8'
>>> r.encoding
'utf-8'
>>> r.text
'{"type":"User"...'
>>> r.json()
{'disk_usage': 368627, 'private_gists': 484, ...}
```
Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
Requests is one of the most downloaded Python packages today, pulling in around `14M downloads / week`. According to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `500,000+` repositories. You may certainly put your trust in this code.
[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests/month)
[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)
[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)
## Installing Requests and Supported Versions
Requests is available on PyPI:
```console
$ python -m pip install requests
```
Requests officially supports Python 2.7 & 3.5+.
## Supported Features & Best–Practices
Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
- Keep-Alive & Connection Pooling
- International Domains and URLs
- Sessions with Cookie Persistence
- Browser-style TLS/SSL Verification
- Basic & Digest Authentication
- Familiar `dict`–like Cookies
- Automatic Content Decompression and Decoding
- Multi-part File Uploads
- SOCKS Proxy Support
- Connection Timeouts
- Streaming Downloads
- Automatic honoring of `.netrc`
- Chunked HTTP Requests
## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
[![Read the Docs](https://raw.githubusercontent.com/psf/requests/master/ext/ss.png)](https://requests.readthedocs.io)
---
[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/master/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/master/ext/psf.png)](https://www.python.org/psf)
requests/__init__.py,sha256=rsmg7xmbbCE_zmDcG6EDk_pyvdEfadztdBaWIkInlH8,4141
requests/__version__.py,sha256=k4J8c1yFRFzwGWwlN7miaDOclFtbcIs1GlnmT17YbXQ,441
requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096
requests/adapters.py,sha256=WelSM1BCQXdbjEuDsBxqKDADeY8BHmxlrwbNnLN2rr4,21344
requests/api.py,sha256=PlHM-HT3PQ5lyufoeGmV-nJxRi7UnUyGVh7OV7B9XV4,6496
requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207
requests/certs.py,sha256=dOB5rV2DZ13dEhq9BUa_4hd5kAqg59e_zUZB00faYz8,453
requests/compat.py,sha256=iBRvu-X540CH4PJsuxr0vcGTnl_TZhq_75SwmeckQ7w,1782
requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430
requests/exceptions.py,sha256=xXoj1rdhnxTS_DYphKZ9OvFZJQZ333A64REc9ZDZIgU,3161
requests/help.py,sha256=lLcBtKAar8T6T78e9Tc4Zfd_EEJFhntxgib1JHNctEI,3515
requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757
requests/models.py,sha256=Uhb4Ra_ubNGBf-6ktHShgO5mUSCGZKa5D_wLGVCMtYk,34308
requests/packages.py,sha256=Q2rF0L5mc3wQAvc6q_lAVtPTDOaOeFgD-7kWSQLkjEQ,542
requests/sessions.py,sha256=BsnR-zYILgoFzJ6yq4T8ht_i0PwwPGVAxWxWaV5dcHg,30137
requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188
requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005
requests/utils.py,sha256=_K9AgkN6efPe-a-zgZurXzds5PBC0CzDkyjAE2oCQFQ,30529
requests-2.25.1.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142
requests-2.25.1.dist-info/METADATA,sha256=RuNh38uN0IMsRT3OwaTNB_WyGx6RMwwQoMwujXfkUVM,4168
requests-2.25.1.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
requests-2.25.1.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9
requests-2.25.1.dist-info/RECORD,,
requests-2.25.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
requests/__pycache__/_internal_utils.cpython-37.pyc,,
requests/__pycache__/help.cpython-37.pyc,,
requests/__pycache__/structures.cpython-37.pyc,,
requests/__pycache__/certs.cpython-37.pyc,,
requests/__pycache__/exceptions.cpython-37.pyc,,
requests/__pycache__/__version__.cpython-37.pyc,,
requests/__pycache__/status_codes.cpython-37.pyc,,
requests/__pycache__/sessions.cpython-37.pyc,,
requests/__pycache__/models.cpython-37.pyc,,
requests/__pycache__/compat.cpython-37.pyc,,
requests/__pycache__/adapters.cpython-37.pyc,,
requests/__pycache__/packages.cpython-37.pyc,,
requests/__pycache__/auth.cpython-37.pyc,,
requests/__pycache__/hooks.cpython-37.pyc,,
requests/__pycache__/utils.cpython-37.pyc,,
requests/__pycache__/cookies.cpython-37.pyc,,
requests/__pycache__/api.cpython-37.pyc,,
requests/__pycache__/__init__.cpython-37.pyc,,
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
......@@ -57,16 +57,18 @@ def check_compatibility(urllib3_version, chardet_version):
# Check urllib3 for compatibility.
major, minor, patch = urllib3_version # noqa: F811
major, minor, patch = int(major), int(minor), int(patch)
# urllib3 >= 1.21.1, <= 1.26
# urllib3 >= 1.21.1, <= 1.25
assert major == 1
assert minor >= 21
assert minor <= 26
assert minor <= 25
# Check chardet for compatibility.
major, minor, patch = chardet_version.split('.')[:3]
major, minor, patch = int(major), int(minor), int(patch)
# chardet >= 3.0.2, < 5.0.0
assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
# chardet >= 3.0.2, < 3.1.0
assert major == 3
assert minor < 1
assert patch >= 2
def _check_cryptography(cryptography_version):
......
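For reference, the two chardet checks above differ only in the accepted range; a
standalone sketch of the broader form (chardet >= 3.0.2, < 5.0.0), with
illustrative version strings:

```python
def chardet_ok(version):
    """Mirror of the broader check: chardet >= 3.0.2, < 5.0.0."""
    major, minor, patch = (int(part) for part in version.split('.')[:3])
    return (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)

assert chardet_ok('4.0.0')
assert chardet_ok('3.0.2')
assert not chardet_ok('3.0.1')
assert not chardet_ok('5.0.0')
```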
......@@ -5,8 +5,8 @@
__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
__url__ = 'https://requests.readthedocs.io'
__version__ = '2.25.1'
__build__ = 0x022501
__version__ = '2.24.0'
__build__ = 0x022400
__author__ = 'Kenneth Reitz'
__author_email__ = 'me@kennethreitz.org'
__license__ = 'Apache 2.0'
......
......@@ -273,9 +273,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Instances are generated from a :class:`Request <Request>` object, and
should not be instantiated manually; doing so may produce undesirable
effects.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
......
# -*- coding: utf-8 -*-
"""
requests.sessions
~~~~~~~~~~~~~~~~~
requests.session
~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
......@@ -387,13 +387,6 @@ class Session(SessionRedirectMixin):
self.stream = False
#: SSL Verification default.
#: Defaults to `True`, requiring requests to verify the TLS certificate at the
#: remote end.
#: If verify is set to `False`, requests will accept any TLS certificate
#: presented by the server, and will ignore hostname mismatches and/or
#: expired certificates, which will make your application vulnerable to
#: man-in-the-middle (MitM) attacks.
#: Only set this to `False` for testing.
self.verify = True
#: SSL client certificate default, if String, path to ssl client
......@@ -502,12 +495,7 @@ class Session(SessionRedirectMixin):
content. Defaults to ``False``.
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use. Defaults to ``True``. When set to
``False``, requests will accept any TLS certificate presented by
the server, and will ignore hostname mismatches and/or expired
certificates, which will make your application vulnerable to
man-in-the-middle (MitM) attacks. Setting verify to ``False``
may be useful during local development or testing.
to a CA bundle to use. Defaults to ``True``.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
:rtype: requests.Response
......
......@@ -169,20 +169,14 @@ def super_len(o):
def get_netrc_auth(url, raise_errors=False):
"""Returns the Requests tuple auth for a given url from netrc."""
netrc_file = os.environ.get('NETRC')
if netrc_file is not None:
netrc_locations = (netrc_file,)
else:
netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)
try:
from netrc import netrc, NetrcParseError
netrc_path = None
for f in netrc_locations:
for f in NETRC_FILES:
try:
loc = os.path.expanduser(f)
loc = os.path.expanduser('~/{}'.format(f))
except KeyError:
# os.path.expanduser can fail when $HOME is undefined and
# getpwuid fails. See https://bugs.python.org/issue20164 &
......@@ -218,7 +212,7 @@ def get_netrc_auth(url, raise_errors=False):
if raise_errors:
raise
# App Engine hackiness.
# AppEngine hackiness.
except (ImportError, AttributeError):
pass
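Either variant of `get_netrc_auth` ultimately returns a `(login, password)`
tuple when a matching machine entry is found, or `None`; a minimal calling
sketch (the URL and any netrc contents are assumptions):

```python
from requests.utils import get_netrc_auth

# Returns ('user', 'secret') when ~/.netrc (or _netrc) has a matching
# "machine example.com" entry, otherwise None; parse errors are swallowed
# unless raise_errors=True is passed.
auth = get_netrc_auth('https://example.com')
print(auth)
```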
......@@ -503,10 +497,6 @@ def get_encoding_from_headers(headers):
if 'text' in content_type:
return 'ISO-8859-1'
if 'application/json' in content_type:
# Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
return 'utf-8'
def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
......
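`get_encoding_from_headers` (shown in part above) simply inspects the
`Content-Type` header; a minimal calling sketch with made-up header values:

```python
from requests.utils import get_encoding_from_headers

# An explicit charset parameter wins; bare text/* falls back to ISO-8859-1.
print(get_encoding_from_headers({'content-type': 'text/html; charset=utf-8'}))  # utf-8
print(get_encoding_from_headers({'content-type': 'text/plain'}))                # ISO-8859-1
print(get_encoding_from_headers({'content-type': 'application/xml'}))           # None
```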
MIT License
Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Metadata-Version: 2.1
Name: urllib3
Version: 1.26.3
Summary: HTTP library with thread-safe connection pooling, file post, and more.
Home-page: https://urllib3.readthedocs.io/
Author: Andrey Petrov
Author-email: andrey.petrov@shazow.net
License: MIT
Project-URL: Documentation, https://urllib3.readthedocs.io/
Project-URL: Code, https://github.com/urllib3/urllib3
Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
Keywords: urllib httplib threadsafe filepost http https ssl pooling
Platform: UNKNOWN
Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Software Development :: Libraries
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
Description-Content-Type: text/x-rst
Provides-Extra: brotli
Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
Provides-Extra: secure
Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
Requires-Dist: certifi ; extra == 'secure'
Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
Provides-Extra: socks
Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
Python ecosystem already uses urllib3 and you should too.
urllib3 brings many critical features that are missing from the Python
standard libraries:
- Thread safety.
- Connection pooling.
- Client-side SSL/TLS verification.
- File uploads with multipart encoding.
- Helpers for retrying requests and dealing with HTTP redirects.
- Support for gzip, deflate, and brotli encoding.
- Proxy support for HTTP and SOCKS.
- 100% test coverage.
urllib3 is powerful and easy to use:
.. code-block:: python
>>> import urllib3
>>> http = urllib3.PoolManager()
>>> r = http.request('GET', 'http://httpbin.org/robots.txt')
>>> r.status
200
>>> r.data
b'User-agent: *\nDisallow: /deny\n'
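As a companion to the snippet above, a hedged sketch of the retry and timeout helpers mentioned in the feature list; ``Retry`` and ``Timeout`` are real urllib3 classes, while the parameter values and URL are purely illustrative:

.. code-block:: python

    import urllib3
    from urllib3.util import Retry, Timeout

    # Pool-wide defaults: up to 3 retries with backoff, split connect/read timeouts.
    http = urllib3.PoolManager(
        retries=Retry(total=3, backoff_factor=0.5),
        timeout=Timeout(connect=2.0, read=5.0),
    )
    r = http.request('GET', 'http://httpbin.org/robots.txt')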
Installing
----------
urllib3 can be installed with `pip <https://pip.pypa.io>`_::
$ python -m pip install urllib3
Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
$ git clone git://github.com/urllib3/urllib3.git
$ python setup.py install
Documentation
-------------
urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
Contributing
------------
urllib3 happily accepts contributions. Please see our
`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
for some tips on getting started.
Security Disclosures
--------------------
To report a security vulnerability, please use the
`Tidelift security contact <https://tidelift.com/security>`_.
Tidelift will coordinate the fix and disclosure with maintainers.
Maintainers
-----------
- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
👋
Sponsorship
-----------
If your company benefits from this library, please consider `sponsoring its
development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
For Enterprise
--------------
.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
:width: 75
:alt: Tidelift
.. list-table::
:widths: 10 100
* - |tideliftlogo|
- Professional support for urllib3 is available as part of the `Tidelift
Subscription`_. Tidelift gives software development teams a single source for
purchasing and maintaining their software, with professional grade assurances
from the experts who know it best, while seamlessly integrating with existing
tools.
.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
Changes
=======
1.26.3 (2021-01-26)
-------------------
* Fixed bytes and string comparison issue with headers (Pull #2141)
* Changed ``ProxySchemeUnknown`` error message to be
more actionable if the user supplies a proxy URL without
a scheme. (Pull #2107)
1.26.2 (2020-11-12)
-------------------
* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't
be imported properly on Python 2.7.8 and earlier (Pull #2052)
1.26.1 (2020-11-11)
-------------------
* Fixed an issue where two ``User-Agent`` headers would be sent if a
``User-Agent`` header key is passed as ``bytes`` (Pull #2047)
1.26.0 (2020-11-10)
-------------------
* **NOTE: urllib3 v2.0 will drop support for Python 2**.
`Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>`_.
* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
still wish to use TLS earlier than 1.2 without a deprecation warning
should opt-in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002)
**Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail**
* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST``
and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``,
``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)``
(Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed**
* Added default ``User-Agent`` header to every request (Pull #1750)
* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``,
and ``Host`` headers from being automatically emitted with requests (Pull #2018); see the sketch after this list
* Collapse ``transfer-encoding: chunked`` request data and framing into
the same ``socket.send()`` call (Pull #1906)
* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894)
* Properly terminate SecureTransport connections when CA verification fails (Pull #1977)
* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None``
to SecureTransport (Pull #1903)
* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970)
* Suppress ``BrokenPipeError`` when writing request body after the server
has closed the socket (Pull #1524)
* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC")
into an ``urllib3.exceptions.SSLError`` (Pull #1939)
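As a hedged illustration of the ``urllib3.util.SKIP_HEADER`` entry above (the constant and import path are real; the URL is a placeholder):

.. code-block:: python

    import urllib3
    from urllib3.util import SKIP_HEADER

    http = urllib3.PoolManager()
    # Suppress the automatic User-Agent header for this request only.
    r = http.request("GET", "http://httpbin.org/headers",
                     headers={"User-Agent": SKIP_HEADER})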
1.25.11 (2020-10-19)
--------------------
* Fix retry backoff time parsed from ``Retry-After`` header when given
in the HTTP date format. The HTTP date was parsed as the local timezone
rather than accounting for the timezone in the HTTP date (typically
UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949)
* Fix issue where an error would be raised when the ``SSLKEYLOGFILE``
environment variable was set to the empty string. Now ``SSLContext.keylog_file``
is not set in this situation (Pull #2016)
1.25.10 (2020-07-22)
--------------------
* Added support for ``SSLKEYLOGFILE`` environment variable for
logging TLS session keys for use with programs like
Wireshark for decrypting captured web traffic (Pull #1867)
* Fixed loading of SecureTransport libraries on macOS Big Sur
due to the new dynamic linker cache (Pull #1905)
* Collapse chunked request bodies data and framing into one
call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906)
* Don't insert ``None`` into ``ConnectionPool`` if the pool
was empty when requesting a connection (Pull #1866)
* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858)
1.25.9 (2020-04-16)
-------------------
* Added ``InvalidProxyConfigurationWarning`` which is raised when
erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
support connecting to HTTPS proxies but will soon be able to
and we would like users to migrate properly without much breakage.
See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
for more information on how to fix your proxy config. (Pull #1851)
* Drain connection after ``PoolManager`` redirect (Pull #1817)
* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
* Allow the CA certificate data to be passed as a string (Pull #1804)
* Raise ``ValueError`` if method contains control characters (Pull #1800)
* Add ``__repr__`` to ``Timeout`` (Pull #1795)
1.25.8 (2020-01-20)
-------------------
* Drop support for EOL Python 3.4 (Pull #1774)
* Optimize _encode_invalid_chars (Pull #1787)
1.25.7 (2019-11-11)
-------------------
* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
* Fix issue where URL fragment was sent within the request target. (Pull #1732)
* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
1.25.6 (2019-09-24)
-------------------
* Fix issue where tilde (``~``) characters were incorrectly
percent-encoded in the path. (Pull #1692)
1.25.5 (2019-09-19)
-------------------
* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
(Issue #1682)
1.25.4 (2019-09-19)
-------------------
* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
* Fix edge case where Retry-After header was still respected even when
explicitly opted out of. (Pull #1607)
* Remove dependency on ``rfc3986`` for URL parsing.
* Fix issue where URLs containing invalid characters within ``Url.auth`` would
raise an exception instead of percent-encoding those characters.
* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
work well with BufferedReaders and other ``io`` module features; see the sketch after this list. (Pull #1652)
* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
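A short sketch of the ``HTTPResponse.auto_close`` entry above; the URL is a placeholder and the ``io.TextIOWrapper`` pairing is one typical use:

.. code-block:: python

    import io
    import urllib3

    http = urllib3.PoolManager()
    resp = http.request("GET", "http://httpbin.org/robots.txt",
                        preload_content=False)
    resp.auto_close = False  # keep the underlying stream open for io wrappers
    for line in io.TextIOWrapper(resp, encoding="utf-8"):
        print(line.rstrip())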
1.25.3 (2019-05-23)
-------------------
* Change ``HTTPSConnection`` to load system CA certificates
when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
unspecified. (Pull #1608, Issue #1603)
* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
1.25.2 (2019-04-28)
-------------------
* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
* Change ``parse_url`` to percent-encode invalid characters within the
path, query, and target components. (Pull #1586)
1.25.1 (2019-04-24)
-------------------
* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
1.25 (2019-04-22)
-----------------
* Require and validate certificates by default when using HTTPS (Pull #1507)
* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
encrypted ``key_file`` without creating your own ``SSLContext`` object; see the sketch after this list. (Pull #1489)
* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
implementations. (Pull #1496)
* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492)
* Fixed issue where OpenSSL would block if an encrypted client private key was
given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
* Added support for Brotli content encoding. It is enabled automatically if
``brotlipy`` package is installed which can be requested with
``urllib3[brotli]`` extra. (Pull #1532)
* Drop ciphers using DSS key exchange from default TLS cipher suites.
Improve default ciphers when using SecureTransport. (Pull #1496)
* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
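A hedged sketch of the ``key_password`` entry above; the host name, file paths, and password are placeholders:

.. code-block:: python

    import urllib3

    pool = urllib3.HTTPSConnectionPool(
        "example.com",
        cert_file="client.pem",   # placeholder client certificate
        key_file="client.key",    # placeholder encrypted private key
        key_password="s3cr3t",    # passphrase used to decrypt key_file
    )
    r = pool.request("GET", "/")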
1.24.3 (2019-05-01)
-------------------
* Apply fix for CVE-2019-9740. (Pull #1591)
1.24.2 (2019-04-17)
-------------------
* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
``ssl_context`` parameters are specified.
* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
1.24.1 (2018-11-02)
-------------------
* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
1.24 (2018-10-16)
-----------------
* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
* Test against Python 3.7 on AppVeyor. (Pull #1453)
* Early-out ipv6 checks when running on App Engine. (Pull #1450)
* Change ambiguous description of backoff_factor (Pull #1436)
* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
* Add a server_hostname parameter to HTTPSConnection which allows for
overriding the SNI hostname sent in the handshake. (Pull #1397)
* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
* Fixed bug where responses with header Content-Type: message/* erroneously
raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
* Move urllib3 to src/urllib3 (Pull #1409)
1.23 (2018-06-04)
-----------------
* Allow providing a list of headers to strip from requests when redirecting
to a different host. Defaults to the ``Authorization`` header. Different
headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
* Dropped Python 3.3 support. (Pull #1242)
* Put the connection back in the pool when calling stream() or read_chunked() on
a chunked HEAD response. (Issue #1234)
* Fixed pyOpenSSL-specific ssl client authentication issue when clients
attempted to auth via certificate + chain (Issue #1060)
* Add the port to the connectionpool connect print (Pull #1251)
* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
* Added support for auth info in url for SOCKS proxy (Pull #1363)
1.22 (2017-07-20)
-----------------
* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
IPv6 proxy. (Issue #1222)
* Made the connection pool retry on ``SSLError``. The original ``SSLError``
is available on ``MaxRetryError.reason``. (Issue #1112)
* Drain and release connection before recursing on retry/redirect. Fixes
deadlocks with a blocking connectionpool. (Issue #1167)
* Fixed compatibility for cookiejar. (Issue #1229)
* pyopenssl: Use vendored version of ``six``. (Issue #1231)
1.21.1 (2017-05-02)
-------------------
* Fixed SecureTransport issue that would cause long delays in response body
delivery. (Pull #1154)
* Fixed regression in 1.21 that threw exceptions when users passed the
``socket_options`` flag to the ``PoolManager``. (Issue #1165)
* Fixed regression in 1.21 that threw exceptions when users passed the
``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
(Pull #1157)
1.21 (2017-04-25)
-----------------
* Improved performance of certain selector system calls on Python 3.5 and
later. (Pull #1095)
* Resolved issue where the PyOpenSSL backend would not wrap SysCallError
exceptions appropriately when sending data. (Pull #1125)
* Selectors now detects a monkey-patched select module after import for modules
that patch the select module like eventlet, greenlet. (Pull #1128)
* Reduced memory consumption when streaming zlib-compressed responses
(as opposed to raw deflate streams). (Pull #1129)
* Connection pools now use the entire request context when constructing the
pool key. (Pull #1016)
* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
(Pull #1016)
* Add retry counter for ``status_forcelist``. (Issue #1147)
* Added ``contrib`` module for using SecureTransport on macOS:
``urllib3.contrib.securetransport``. (Pull #1122)
* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
for schemes it does not recognise, it assumes they are case-sensitive and
leaves them unchanged.
(Issue #1080)
1.20 (2017-01-19)
-----------------
* Added support for waiting for I/O using selectors other than select,
improving urllib3's behaviour with large numbers of concurrent connections.
(Pull #1001)
* Updated the date for the system clock check. (Issue #1005)
* ConnectionPools now correctly consider hostnames to be case-insensitive.
(Issue #1032)
* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
to fail when it is injected, rather than at first use. (Pull #1063)
* Outdated versions of cryptography now cause the PyOpenSSL contrib module
to fail when it is injected, rather than at first use. (Issue #1044)
* Automatically attempt to rewind a file-like body object when a request is
retried or redirected. (Pull #1039)
* Fix some bugs that occur when modules incautiously patch the queue module.
(Pull #1061)
* Prevent retries from occurring on read timeouts for which the request method
was not in the method whitelist. (Issue #1059)
* Changed the PyOpenSSL contrib module to lazily load idna to avoid
unnecessarily bloating the memory of programs that don't need it. (Pull
#1076)
* Add support for IPv6 literals with zone identifiers. (Pull #1013)
* Added support for socks5h:// and socks4a:// schemes when working with SOCKS
proxies, and controlled remote DNS appropriately. (Issue #1035)
1.19.1 (2016-11-16)
-------------------
* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
1.19 (2016-11-03)
-----------------
* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
using the default retry logic. (Pull #955)
* Remove markers from setup.py to assist ancient setuptools versions. (Issue
#986)
* Disallow superscripts and other integerish things in URL ports. (Issue #989)
* Allow urllib3's HTTPResponse.stream() method to continue to work with
non-httplib underlying FPs. (Pull #990)
* Empty filenames in multipart headers are now emitted as such, rather than
being suppressed. (Issue #1015)
* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
1.18.1 (2016-10-27)
-------------------
* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
release fixes a vulnerability whereby urllib3 in the above configuration
would silently fail to validate TLS certificates due to erroneously setting
invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
flags do not cause a problem in OpenSSL versions before 1.1.0, which
interprets the presence of any flag as requesting certificate validation.
There is no PR for this patch, as it was prepared for simultaneous disclosure
and release. The master branch received the same fix in Pull #1010.
1.18 (2016-09-26)
-----------------
* Fixed incorrect message for IncompleteRead exception. (Pull #973)
* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
(Issue #258)
* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
(Issue #977)
* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
1.17 (2016-09-06)
-----------------
* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
* Substantially refactored documentation. (Issue #887)
* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
(Issue #858)
* Normalize the scheme and host in the URL parser (Issue #833)
* ``HTTPResponse`` contains the last ``Retry`` object, which now also
contains retries history. (Issue #848)
* Timeout can no longer be set as boolean, and must be greater than zero.
(Pull #924)
* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
now use cryptography and idna, both of which are already dependencies of
PyOpenSSL. (Pull #930)
* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
* Try to use the operating system's certificates when we are using an
``SSLContext``. (Pull #941)
* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947)
* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958)
* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958)
* Implemented ``length_remaining`` to determine remaining content
to be read. (Pull #949)
* Implemented ``enforce_content_length`` to enable exceptions when
incomplete data chunks are received. (Pull #949)
* Dropped connection start, dropped connection reset, redirect, forced retry,
and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967)
1.16 (2016-06-11)
-----------------
* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
* Provide ``key_fn_by_scheme`` pool keying mechanism that can be
overridden. (Issue #830)
* Normalize scheme and host to lowercase for pool keys, and include
``source_address``. (Issue #830)
* Cleaner exception chain in Python 3 for ``_make_request``.
(Issue #861)
* Fixed installing ``urllib3[socks]`` extra. (Issue #864)
* Fixed signature of ``ConnectionPool.close`` so it can actually safely be
called by subclasses. (Issue #873)
* Retain ``release_conn`` state across retries. (Issues #651, #866)
* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
1.15.1 (2016-04-11)
-------------------
* Fix packaging to include backports module. (Issue #841)
1.15 (2016-04-06)
-----------------
* Added Retry(raise_on_status=False). (Issue #720)
* Always use setuptools, no more distutils fallback. (Issue #785)
* Dropped support for Python 3.2. (Issue #786)
* Chunked transfer encoding when requesting with ``chunked=True``; see the sketch after this list.
(Issue #790)
* Fixed regression with IPv6 port parsing. (Issue #801)
* Append SNIMissingWarning messages to allow users to specify it in
the PYTHONWARNINGS environment variable. (Issue #816)
* Handle unicode headers in Py2. (Issue #818)
* Log certificate when there is a hostname mismatch. (Issue #820)
* Preserve order of request/response headers. (Issue #821)
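A hedged sketch of the ``chunked=True`` entry above; the URL is a placeholder and the generator body is illustrative:

.. code-block:: python

    import urllib3

    http = urllib3.PoolManager()

    def body_chunks():
        yield b"first chunk"
        yield b"second chunk"

    # Each yielded chunk is sent with chunked transfer-encoding framing.
    r = http.request("POST", "http://httpbin.org/post",
                     body=body_chunks(), chunked=True)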
1.14 (2015-12-29)
-----------------
* contrib: SOCKS proxy support! (Issue #762)
* Fixed AppEngine handling of transfer-encoding header and bug
in Timeout defaults checking. (Issue #763)
1.13.1 (2015-12-18)
-------------------
* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
1.13 (2015-12-14)
-----------------
* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
* Close connections more defensively on exception. (Issue #734)
* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
repeatedly flushing the decoder, to function better on Jython. (Issue #743)
* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
1.12 (2015-09-03)
-----------------
* Rely on ``six`` for importing ``httplib`` to work around
conflicts with other Python 3 shims. (Issue #688)
* Add support for directories of certificate authorities, as supported by
OpenSSL. (Issue #701)
* New exception: ``NewConnectionError``, raised when we fail to establish
a new connection, usually ``ECONNREFUSED`` socket error.
1.11 (2015-07-21)
-----------------
* When ``ca_certs`` is given, ``cert_reqs`` defaults to
``'CERT_REQUIRED'``. (Issue #650)
* ``pip install urllib3[secure]`` will install Certifi and
PyOpenSSL as dependencies. (Issue #678)
* Made ``HTTPHeaderDict`` usable as a ``headers`` input value
(Issues #632, #679)
* Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
which has an ``AppEngineManager`` for using ``URLFetch`` in a
Google AppEngine environment. (Issue #664)
* Dev: Added test suite for AppEngine. (Issue #631)
* Fix performance regression when using PyOpenSSL. (Issue #626)
* Passing incorrect scheme (e.g. ``foo://``) will raise
``ValueError`` instead of ``AssertionError`` (backwards
compatible for now, but please migrate). (Issue #640)
* Fix pools not getting replenished when an error occurs during a
request using ``release_conn=False``. (Issue #644)
* Fix pool-default headers not applying for url-encoded requests
like GET. (Issue #657)
* log.warning in Python 3 when headers are skipped due to parsing
errors. (Issue #642)
* Close and discard connections if an error occurs during read.
(Issue #660)
* Fix host parsing for IPv6 proxies. (Issue #668)
* Separate warning type SubjectAltNameWarning, now issued once
per host. (Issue #671)
* Fix ``httplib.IncompleteRead`` not getting converted to
``ProtocolError`` when using ``HTTPResponse.stream()``
(Issue #674)
1.10.4 (2015-05-03)
-------------------
* Migrate tests to Tornado 4. (Issue #594)
* Append default warning configuration rather than overwrite.
(Issue #603)
* Fix streaming decoding regression. (Issue #595)
* Fix chunked requests losing state across keep-alive connections.
(Issue #599)
* Fix hanging when chunked HEAD response has no body. (Issue #605)
1.10.3 (2015-04-21)
-------------------
* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
(Issue #558)
* Fix regression of duplicate header keys being discarded.
(Issue #563)
* ``Response.stream()`` returns a generator for chunked responses.
(Issue #560)
* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
(Issue #585)
* Work on platforms without `ssl` module for plain HTTP requests.
(Issue #587)
* Stop relying on the stdlib's default cipher list. (Issue #588)
1.10.2 (2015-02-25)
-------------------
* Fix file descriptor leakage on retries. (Issue #548)
* Removed RC4 from default cipher list. (Issue #551)
* Header performance improvements. (Issue #544)
* Fix PoolManager not obeying redirect retry settings. (Issue #553)
1.10.1 (2015-02-10)
-------------------
* Pools can be used as context managers; see the sketch after this list. (Issue #545)
* Don't re-use connections which experienced an SSLError. (Issue #529)
* Don't fail when gzip decoding an empty stream. (Issue #535)
* Add sha256 support for fingerprint verification. (Issue #540)
* Fixed handling of header values containing commas. (Issue #533)
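A brief illustration of the context-manager support noted above; the host and path are placeholders:

.. code-block:: python

    import urllib3

    with urllib3.HTTPConnectionPool("httpbin.org", maxsize=1) as pool:
        r = pool.request("GET", "/robots.txt")
    # Leaving the block closes the pool and its idle connections.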
1.10 (2014-12-14)
-----------------
* Disabled SSLv3. (Issue #473)
* Add ``Url.url`` property to return the composed url string. (Issue #394)
* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
* ``MaxRetryError.reason`` will always be an exception, not string.
(Issue #481)
* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
(Issue #496)
* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
(Issue #499)
* Close and discard sockets which experienced SSL-related errors.
(Issue #501)
* Handle ``body`` param in ``.request(...)``. (Issue #513)
* Respect timeout with HTTPS proxy. (Issue #505)
* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
1.9.1 (2014-09-13)
------------------
* Apply socket arguments before binding. (Issue #427)
* More careful checks if fp-like object is closed. (Issue #435)
* Fixed packaging issues of some development-related files not
getting included. (Issue #440)
* Allow performing *only* fingerprint verification. (Issue #444)
* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
(Issue #443)
1.9 (2014-07-04)
----------------
* Shuffled around development-related files. If you're maintaining a distro
package of urllib3, you may need to tweak things. (Issue #415)
* Unverified HTTPS requests will trigger a warning on the first request. See
our new `security documentation
<https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
(Issue #426)
* New retry logic and ``urllib3.util.retry.Retry`` configuration object.
(Issue #326)
* All raised exceptions should now be wrapped in a
``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
* All errors during a retry-enabled request should be wrapped in
``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
which were previously exempt. Underlying error is accessible from the
``.reason`` property. (Issue #326)
* ``urllib3.exceptions.ConnectionError`` renamed to
``urllib3.exceptions.ProtocolError``. (Issue #326)
* Errors during response read (such as IncompleteRead) are now wrapped in
``urllib3.exceptions.ProtocolError``. (Issue #418)
* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
(Issue #417)
* Catch read timeouts over SSL connections as
``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
* Apply socket arguments before connecting. (Issue #427)
1.8.3 (2014-06-23)
------------------
* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
* Wrap ``socket.timeout`` exception with
``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
* Fixed proxy-related bug where connections were being reused incorrectly.
(Issues #366, #369)
* Added ``socket_options`` keyword parameter which allows defining
``setsockopt`` configuration of new sockets. (Issue #397)
* Removed ``HTTPConnection.tcp_nodelay`` in favor of
``HTTPConnection.default_socket_options``. (Issue #397)
* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
1.8.2 (2014-04-17)
------------------
* Fix ``urllib3.util`` not being included in the package.
1.8.1 (2014-04-17)
------------------
* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
for the test suite. (Issue #362)
* Added support for specifying ``source_address``. (Issue #352)
1.8 (2014-03-04)
----------------
* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
username, and blank ports like 'hostname:').
* New ``urllib3.connection`` module which contains all the HTTPConnection
objects.
* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
signature to a more sensible order. [Backwards incompatible]
(Issues #252, #262, #263)
* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
returns the number of bytes read so far. (Issue #277)
* Support for platforms without threading. (Issue #289)
* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
to allow a pool with no specified port to be considered equal to an
HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
* Improved default SSL/TLS settings to avoid vulnerabilities.
(Issue #309)
* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
(Issue #310)
* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
will send the entire HTTP request ~200 milliseconds faster; however, some of
the resulting TCP packets will be smaller. (Issue #254)
* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
from the default 64 to 1024 in a single certificate. (Issue #318)
* Headers are now passed and stored as a custom
``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
(Issue #329, #333)
* Headers no longer lose their case on Python 3. (Issue #236)
* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
certificates on inject. (Issue #332)
* Requests with ``retries=False`` will immediately raise any exceptions without
wrapping them in ``MaxRetryError``. (Issue #348)
* Fixed open socket leak with SSL-related failures. (Issue #344, #348)
1.7.1 (2013-09-25)
------------------
* Added granular timeout support with new ``urllib3.util.Timeout`` class.
(Issue #231)
* Fixed Python 3.4 support. (Issue #238)
1.7 (2013-08-14)
----------------
* More exceptions are now pickle-able, with tests. (Issue #174)
* Fixed redirecting with relative URLs in Location header. (Issue #178)
* Support for relative urls in ``Location: ...`` header. (Issue #179)
* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
file-like functionality. (Issue #187)
* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
skip hostname verification for SSL connections. (Issue #194)
* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
generator wrapped around ``.read(...)``. (Issue #198)
* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
* Fixed thread race condition in
``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
* ``ProxyManager`` requests now include non-default port in ``Host: ...``
header. (Issue #217)
* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139)
* New ``RequestField`` object can be passed to the ``fields=...`` param which
can specify headers. (Issue #220)
* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
(Issue #221)
* Use international headers when posting file names. (Issue #119)
* Improved IPv6 support. (Issue #203)
1.6 (2013-04-25)
----------------
* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
* Improved SSL-related code. ``cert_req`` now optionally takes a string like
"REQUIRED" or "NONE". Likewise, ``ssl_version`` takes strings like "SSLv23".
The string values reflect the suffix of the respective constant variable.
(Issue #130)
* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
closed proxy connections and larger read buffers. (Issue #135)
* Ensure the connection is closed if no data is received, fixes connection leak
on some platforms. (Issue #133)
* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
* Tests fixed to be compatible with Py26 again. (Issue #125)
* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
* Allow an explicit content type to be specified when encoding file fields.
(Issue #126)
* Exceptions are now pickleable, with tests. (Issue #101)
* Fixed default headers not getting passed in some cases. (Issue #99)
* Treat "content-encoding" header value as case-insensitive, per RFC 2616
Section 3.5. (Issue #110)
* "Connection Refused" SocketErrors will get retried rather than raised.
(Issue #92)
* Updated vendored ``six``, no longer overrides the global ``six`` module
namespace. (Issue #113)
* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
the exception that prompted the final retry. If ``reason is None`` then it
was due to a redirect. (Issue #92, #114)
* Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
(Issue #149)
* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
that are not files. (Issue #111)
* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122)
* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
against an arbitrary hostname (when connecting by IP or for misconfigured
servers). (Issue #140)
* Streaming decompression support. (Issue #159)
1.5 (2012-08-02)
----------------
* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
logging in urllib3; see the sketch after this list.
* Native full URL parsing (including auth, path, query, fragment) available in
``urllib3.util.parse_url(url)``.
* Built-in redirect will switch method to 'GET' if status code is 303.
(Issue #11)
* ``urllib3.PoolManager`` strips the scheme and host before sending the request
uri. (Issue #8)
* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
based on the Content-Type header, fails.
* Fixed bug with pool depletion and leaking connections (Issue #76). Added
explicit connection closing on pool eviction. Added
``urllib3.PoolManager.clear()``.
* 99% -> 100% unit test coverage.
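A combined sketch of the logging and URL-parsing helpers introduced above; the URL passed to ``parse_url`` is illustrative:

.. code-block:: python

    import urllib3
    from urllib3.util import parse_url

    urllib3.add_stderr_logger()  # emit urllib3 DEBUG logs to stderr

    url = parse_url("https://user@example.com:8443/path?q=1#frag")
    print(url.scheme, url.auth, url.host, url.port, url.path, url.query, url.fragment)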
1.4 (2012-06-16)
----------------
* Minor AppEngine-related fixes.
* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
* Improved url parsing. (Issue #73)
* IPv6 url support. (Issue #72)
1.3 (2012-03-25)
----------------
* Removed pre-1.0 deprecated API.
* Refactored helpers into a ``urllib3.util`` submodule.
* Fixed multipart encoding to support list-of-tuples for keys with multiple
values. (Issue #48)
* Fixed multiple Set-Cookie headers in response not getting merged properly in
Python 3. (Issue #53)
* AppEngine support with Py27. (Issue #61)
* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
bytes.
1.2.2 (2012-02-06)
------------------
* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
1.2.1 (2012-02-05)
------------------
* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
which inherits from ``ValueError``.
1.2 (2012-01-29)
----------------
* Added Python 3 support (tested on 3.2.2)
* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
* Use ``select.poll`` instead of ``select.select`` for platforms that support
it.
* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
* Fixed ``ImportError`` during install when ``ssl`` module is not available.
(Issue #41)
* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
``eventlet``. Removed extraneous unsupported dummyserver testing backends.
Added socket-level tests.
* More tests. Achievement Unlocked: 99% Coverage.
1.1 (2012-01-07)
----------------
* Refactored ``dummyserver`` to its own root namespace module (used for
testing).
* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
Py32's ``ssl_match_hostname``. (Issue #25)
* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
#27)
* Fixed timeout-related bugs. (Issues #17, #23)
1.0.2 (2011-11-04)
------------------
* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
you're using the object manually. (Thanks pyos)
* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
wrapping the access log in a mutex. (Thanks @christer)
* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
1.0.1 (2011-10-10)
------------------
* Fixed a bug where the same connection would get returned into the pool twice,
causing extraneous "HttpConnectionPool is full" log warnings.
1.0 (2011-10-08)
----------------
* Added ``PoolManager`` with LRU expiration of connections (tested and
documented).
* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
with HTTPS proxies).
* Added optional partial-read support for responses when
``preload_content=False``. You can now make requests and just read the headers
without loading the content; see the sketch after this list.
* Made response decoding optional (default on, same as before).
* Added optional explicit boundary string for ``encode_multipart_formdata``.
* Convenience request methods are now inherited from ``RequestMethods``. Old
helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
the new ``request(method, url, ...)``.
* Refactored code to be even more decoupled, reusable, and extendable.
* License header added to ``.py`` files.
* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
* Embettered all the things!
* Started writing this file.
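A minimal sketch of the ``preload_content=False`` flow described above; the URL is a placeholder:

.. code-block:: python

    import urllib3

    http = urllib3.PoolManager()
    resp = http.request("GET", "http://httpbin.org/bytes/1024",
                        preload_content=False)
    print(resp.status, resp.headers.get("Content-Type"))
    first_chunk = resp.read(256)   # read only part of the body
    resp.release_conn()            # hand the connection back to the pool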
0.4.1 (2011-07-17)
------------------
* Minor bug fixes, code cleanup.
0.4 (2011-03-01)
----------------
* Better unicode support.
* Added ``VerifiedHTTPSConnection``.
* Added ``NTLMConnectionPool`` in contrib.
* Minor improvements.
0.3.1 (2010-07-13)
------------------
* Added ``assert_host_name`` optional parameter. Now compatible with proxies.
0.3 (2009-12-10)
----------------
* Added HTTPS support.
* Minor bug fixes.
* Refactored, broken backwards compatibility with 0.2.
* API to be treated as stable from this version forward.
0.2 (2008-11-17)
----------------
* Added unit tests.
* Bug fixes.
0.1 (2008-11-16)
----------------
* First release.
urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763
urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
urllib3/_version.py,sha256=nwhnFBUGZnvtqPFpVlkSZM65yja2NkSV_AVO6h9pNfw,63
urllib3/connection.py,sha256=78sb0uVOr87AdmCAUQ7z7BRyAr2y5CGzamEFoKHJeBo,18528
urllib3/connectionpool.py,sha256=IKoeuJZY9YAYm0GK4q-MXAhyXW0M_FnvabYaNsDIR-E,37133
urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763
urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203
urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
urllib3/contrib/appengine.py,sha256=7Pxb0tKfDB_LTGPERiswH0qomhDoUUOo5kwybAKLQyE,11010
urllib3/contrib/ntlmpool.py,sha256=6I95h1_71fzxmoMSNtY0gB8lnyCoVtP_DpqFGj14fdU,4160
urllib3/contrib/pyopenssl.py,sha256=vgh6j52w9xgwq-3R2kfB5M2JblQATJfKAK3lIAc1kSg,16778
urllib3/contrib/securetransport.py,sha256=KxGPZk8d4YepWm7Rc-SBt1XrzIfnLKc8JkUVV75XzgE,34286
urllib3/contrib/socks.py,sha256=DcRjM2l0rQMIyhYrN6r-tnVkY6ZTDxHJlM8_usAkGCA,7097
urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
urllib3/contrib/_securetransport/bindings.py,sha256=E1_7ScsgOchfxneozbAueK7ziCwF35fna4DuDCYJ9_o,17637
urllib3/contrib/_securetransport/low_level.py,sha256=lgIdsSycqfB0Xm5BiJzXGeIKT7ybCQMFPJAgkcwPa1s,13908
urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
urllib3/packages/ssl_match_hostname/__init__.py,sha256=zppezdEQdpGsYerI6mV6MfUYy495JV4mcOWC_GgbljU,757
urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
urllib3/util/connection.py,sha256=21B-LX0c8fkxPDssyHCaK0pCnmrKmhltg5EoouHiAPU,4910
urllib3/util/proxy.py,sha256=FGipAEnvZteyldXNjce4DEB7YzwU-a5lep8y5S0qHQg,1604
urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123
urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
urllib3/util/retry.py,sha256=tn168HDMUynFmXRP-uVaLRUOlbTEJikoB1RuZdwfCes,21366
urllib3/util/ssl_.py,sha256=fRqzRmoLfiDv5_tFzgFtGzlOuuHoLX0us7WUf7tYDAo,16269
urllib3/util/ssltransport.py,sha256=IvGQvs9YWkf4jzfqVjTu_UWjwAUgPn5ActajW8VLz6A,6908
urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
urllib3/util/url.py,sha256=LWfLSlI4l2FmUMKfCkElCaW10-0N-sJDT9bxaDZJkjs,13964
urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
urllib3-1.26.3.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
urllib3-1.26.3.dist-info/METADATA,sha256=bfUq23qlxCvoPweVN1YGCu-yeErYC5EbVCmF6tUG4Qs,43231
urllib3-1.26.3.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
urllib3-1.26.3.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
urllib3-1.26.3.dist-info/RECORD,,
urllib3-1.26.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
urllib3/util/__pycache__/response.cpython-37.pyc,,
urllib3/util/__pycache__/timeout.cpython-37.pyc,,
urllib3/util/__pycache__/proxy.cpython-37.pyc,,
urllib3/util/__pycache__/url.cpython-37.pyc,,
urllib3/util/__pycache__/request.cpython-37.pyc,,
urllib3/util/__pycache__/connection.cpython-37.pyc,,
urllib3/util/__pycache__/wait.cpython-37.pyc,,
urllib3/util/__pycache__/ssltransport.cpython-37.pyc,,
urllib3/util/__pycache__/ssl_.cpython-37.pyc,,
urllib3/util/__pycache__/retry.cpython-37.pyc,,
urllib3/util/__pycache__/__init__.cpython-37.pyc,,
urllib3/util/__pycache__/queue.cpython-37.pyc,,
urllib3/__pycache__/response.cpython-37.pyc,,
urllib3/__pycache__/poolmanager.cpython-37.pyc,,
urllib3/__pycache__/_version.cpython-37.pyc,,
urllib3/__pycache__/exceptions.cpython-37.pyc,,
urllib3/__pycache__/request.cpython-37.pyc,,
urllib3/__pycache__/filepost.cpython-37.pyc,,
urllib3/__pycache__/connectionpool.cpython-37.pyc,,
urllib3/__pycache__/connection.cpython-37.pyc,,
urllib3/__pycache__/_collections.cpython-37.pyc,,
urllib3/__pycache__/fields.cpython-37.pyc,,
urllib3/__pycache__/__init__.cpython-37.pyc,,
urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc,,
urllib3/contrib/__pycache__/securetransport.cpython-37.pyc,,
urllib3/contrib/__pycache__/appengine.cpython-37.pyc,,
urllib3/contrib/__pycache__/socks.cpython-37.pyc,,
urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc,,
urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc,,
urllib3/contrib/__pycache__/__init__.cpython-37.pyc,,
urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc,,
urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc,,
urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc,,
urllib3/packages/__pycache__/six.cpython-37.pyc,,
urllib3/packages/__pycache__/__init__.cpython-37.pyc,,
urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc,,
urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc,,
urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc,,
urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc,,
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
"""
Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
urllib3 - Thread-safe connection pooling and re-using.
"""
from __future__ import absolute_import
# Set default logging handler to avoid "No handler found" warnings.
import logging
import warnings
from logging import NullHandler
from . import exceptions
from ._version import __version__
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry
from ._version import __version__
# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
......
......@@ -17,10 +17,9 @@ except ImportError: # Platform-specific: No threads available
from collections import OrderedDict
from .exceptions import InvalidHeader
from .packages import six
from .packages.six import iterkeys, itervalues
from .packages.six import iterkeys, itervalues, PY3
__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
......@@ -175,7 +174,7 @@ class HTTPHeaderDict(MutableMapping):
def __ne__(self, other):
return not self.__eq__(other)
if six.PY2: # Python 2
if not PY3: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
......
# This file is protected via CODEOWNERS
__version__ = "1.26.3"
__version__ = "1.25.11"
from __future__ import absolute_import
import re
import datetime
import logging
import os
import re
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
from socket import error as SocketError
from socket import timeout as SocketTimeout
from .packages import six
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
from .packages.six.moves.http_client import HTTPException # noqa: F401
from .util.proxy import create_proxy_ssl_context
try: # Compiled with SSL?
import ssl
......@@ -34,68 +30,66 @@ except NameError:
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
BrokenPipeError = BrokenPipeError
except NameError: # Python 2:
class BrokenPipeError(Exception):
pass
from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
NewConnectionError,
ConnectTimeoutError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import CertificateError, match_hostname
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .packages.ssl_match_hostname import match_hostname, CertificateError
from .util.ssl_ import (
assert_fingerprint,
create_urllib3_context,
resolve_cert_reqs,
resolve_ssl_version,
assert_fingerprint,
create_urllib3_context,
ssl_wrap_socket,
)
from .util import connection
from ._collections import HTTPHeaderDict
log = logging.getLogger(__name__)
port_by_scheme = {"http": 80, "https": 443}
# When it comes time to update this value as a part of regular maintenance
# (ie test_recent_date is failing) update it to ~6 months before the current date.
RECENT_DATE = datetime.date(2020, 7, 1)
RECENT_DATE = datetime.date(2019, 1, 1)
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""
pass
class HTTPConnection(_HTTPConnection, object):
"""
Based on :class:`http.client.HTTPConnection` but provides an extra constructor
Based on httplib.HTTPConnection but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass:
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass::
.. code-block:: python
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme["http"]
......@@ -118,10 +112,6 @@ class HTTPConnection(_HTTPConnection, object):
#: provided, we use the default options.
self.socket_options = kw.pop("socket_options", self.default_socket_options)
# Proxy options provided by the user.
self.proxy = kw.pop("proxy", None)
self.proxy_config = kw.pop("proxy_config", None)
_HTTPConnection.__init__(self, *args, **kw)
@property
......@@ -184,13 +174,10 @@ class HTTPConnection(_HTTPConnection, object):
return conn
def _is_using_tunnel(self):
# Google App Engine's httplib does not define _tunnel_host
return getattr(self, "_tunnel_host", None)
def _prepare_conn(self, conn):
self.sock = conn
if self._is_using_tunnel():
# Google App Engine's httplib does not define _tunnel_host
if getattr(self, "_tunnel_host", None):
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
......@@ -201,9 +188,7 @@ class HTTPConnection(_HTTPConnection, object):
self._prepare_conn(conn)
def putrequest(self, method, url, *args, **kwargs):
""""""
# Empty docstring because the indentation of CPython's implementation
# is broken but we don't want this method in our documentation.
"""Send a request to the server"""
match = _CONTAINS_CONTROL_CHAR_RE.search(method)
if match:
raise ValueError(
......@@ -213,40 +198,17 @@ class HTTPConnection(_HTTPConnection, object):
return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
def putheader(self, header, *values):
""""""
if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
_HTTPConnection.putheader(self, header, *values)
elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
raise ValueError(
"urllib3.util.SKIP_HEADER only supports '%s'"
% ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
)
def request(self, method, url, body=None, headers=None):
if headers is None:
headers = {}
else:
# Avoid modifying the headers passed into .request()
headers = headers.copy()
if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
headers["User-Agent"] = _get_default_user_agent()
super(HTTPConnection, self).request(method, url, body=body, headers=headers)
def request_chunked(self, method, url, body=None, headers=None):
"""
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
headers = headers or {}
header_keys = set([six.ensure_str(k.lower()) for k in headers])
skip_accept_encoding = "accept-encoding" in header_keys
skip_host = "host" in header_keys
headers = HTTPHeaderDict(headers if headers is not None else {})
skip_accept_encoding = "accept-encoding" in headers
skip_host = "host" in headers
self.putrequest(
method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
)
if "user-agent" not in header_keys:
self.putheader("User-Agent", _get_default_user_agent())
for header, value in headers.items():
self.putheader(header, value)
if "transfer-encoding" not in headers:
......@@ -274,11 +236,6 @@ class HTTPConnection(_HTTPConnection, object):
class HTTPSConnection(HTTPConnection):
"""
Many of the parameters to this constructor are passed to the underlying SSL
socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
"""
default_port = port_by_scheme["https"]
cert_reqs = None
......@@ -287,7 +244,6 @@ class HTTPSConnection(HTTPConnection):
ca_cert_data = None
ssl_version = None
assert_fingerprint = None
tls_in_tls_required = False
def __init__(
self,
......@@ -352,15 +308,10 @@ class HTTPSConnection(HTTPConnection):
# Add certificate verification
conn = self._new_conn()
hostname = self.host
tls_in_tls = False
if self._is_using_tunnel():
if self.tls_in_tls_required:
conn = self._connect_tls_proxy(hostname, conn)
tls_in_tls = True
# Google App Engine's httplib does not define _tunnel_host
if getattr(self, "_tunnel_host", None):
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
......@@ -418,26 +369,8 @@ class HTTPSConnection(HTTPConnection):
ca_cert_data=self.ca_cert_data,
server_hostname=server_hostname,
ssl_context=context,
tls_in_tls=tls_in_tls,
)
# If we're using all defaults and the connection
# is TLSv1 or TLSv1.1 we throw a DeprecationWarning
# for the host.
if (
default_ssl_context
and self.ssl_version is None
and hasattr(self.sock, "version")
and self.sock.version() in {"TLSv1", "TLSv1.1"}
):
warnings.warn(
"Negotiating TLSv1/TLSv1.1 by default is deprecated "
"and will be disabled in urllib3 v2.0.0. Connecting to "
"'%s' with '%s' can be enabled by explicitly opting-in "
"with 'ssl_version'" % (self.host, self.sock.version()),
DeprecationWarning,
)
if self.assert_fingerprint:
assert_fingerprint(
self.sock.getpeercert(binary_form=True), self.assert_fingerprint
......@@ -468,40 +401,6 @@ class HTTPSConnection(HTTPConnection):
or self.assert_fingerprint is not None
)
def _connect_tls_proxy(self, hostname, conn):
"""
Establish a TLS connection to the proxy using the provided SSL context.
"""
proxy_config = self.proxy_config
ssl_context = proxy_config.ssl_context
if ssl_context:
# If the user provided a proxy context, we assume CA and client
# certificates have already been set
return ssl_wrap_socket(
sock=conn,
server_hostname=hostname,
ssl_context=ssl_context,
)
ssl_context = create_proxy_ssl_context(
self.ssl_version,
self.cert_reqs,
self.ca_certs,
self.ca_cert_dir,
self.ca_cert_data,
)
# If no cert was provided, use only the default options for server
# certificate validation
return ssl_wrap_socket(
sock=conn,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
ca_cert_data=self.ca_cert_data,
server_hostname=hostname,
ssl_context=ssl_context,
)
def _match_hostname(cert, asserted_hostname):
try:
......@@ -518,16 +417,6 @@ def _match_hostname(cert, asserted_hostname):
raise
def _get_default_user_agent():
return "python-urllib3/%s" % __version__
class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""
pass
if not ssl:
HTTPSConnection = DummyConnection # noqa: F811
......
from __future__ import absolute_import
import errno
import logging
import socket
import sys
import warnings
from socket import error as SocketError
from socket import timeout as SocketTimeout
from .connection import (
BaseSSLError,
BrokenPipeError,
DummyConnection,
HTTPConnection,
HTTPException,
HTTPSConnection,
VerifiedHTTPSConnection,
port_by_scheme,
)
from socket import error as SocketError, timeout as SocketTimeout
import socket
from .exceptions import (
ClosedPoolError,
ProtocolError,
EmptyPoolError,
HeaderParsingError,
HostChangedError,
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
NewConnectionError,
ProtocolError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
InsecureRequestWarning,
NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .packages.six.moves import queue
from .packages.ssl_match_hostname import CertificateError
from .connection import (
port_by_scheme,
DummyConnection,
HTTPConnection,
HTTPSConnection,
VerifiedHTTPSConnection,
HTTPException,
BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.queue import LifoQueue
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import get_host, parse_url
from .util.url import (
get_host,
parse_url,
Url,
_normalize_host as normalize_host,
_encode_target,
)
from .util.queue import LifoQueue
xrange = six.moves.xrange
......@@ -107,16 +111,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
:class:`http.client.HTTPConnection`.
:class:`httplib.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
into :class:`http.client.HTTPConnection`.
into :class:`httplib.HTTPConnection`.
:param strict:
Causes BadStatusLine to be raised if the status line can't be parsed
as a valid HTTP/1.0 or 1.1 status line, passed into
:class:`http.client.HTTPConnection`.
:class:`httplib.HTTPConnection`.
.. note::
Only works in Python 2. This parameter is ignored in Python 3.
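For illustration, a minimal sketch of constructing such a pool directly (host name is hypothetical):

from urllib3 import HTTPConnectionPool

pool = HTTPConnectionPool("example.com", port=80, maxsize=5, timeout=3.0)
response = pool.request("GET", "/")
print(response.status)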
......@@ -150,11 +154,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
:class:`urllib3.ProxyManager`
:class:`urllib3.connectionpool.ProxyManager`
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
instead, see :class:`urllib3.ProxyManager`
instead, see :class:`urllib3.connectionpool.ProxyManager`
:param \\**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
......@@ -177,7 +181,6 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
retries=None,
_proxy=None,
_proxy_headers=None,
_proxy_config=None,
**conn_kw
):
ConnectionPool.__init__(self, host, port)
......@@ -199,7 +202,6 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
self.proxy = _proxy
self.proxy_headers = _proxy_headers or {}
self.proxy_config = _proxy_config
# Fill the queue up so that doing get() on it will block properly
for _ in xrange(maxsize):
......@@ -216,9 +218,6 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# list.
self.conn_kw.setdefault("socket_options", [])
self.conn_kw["proxy"] = self.proxy
self.conn_kw["proxy_config"] = self.proxy_config
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
......@@ -273,7 +272,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn.close()
if getattr(conn, "auto_open", 1) == 0:
# This is a proxied connection that has been mutated by
# http.client._tunnel() and cannot be reused (since it would
# httplib._tunnel() and cannot be reused (since it would
# attempt to bypass the proxy)
conn = None
......@@ -385,30 +384,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
# conn.request() calls http.client.*.request, not the method in
# conn.request() calls httplib.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
try:
if chunked:
conn.request_chunked(method, url, **httplib_request_kw)
else:
conn.request(method, url, **httplib_request_kw)
# We are swallowing BrokenPipeError (errno.EPIPE) since the server is
# legitimately able to close the connection after sending a valid response.
# With this behaviour, the received response is still readable.
except BrokenPipeError:
# Python 3
pass
except IOError as e:
# Python 2 and macOS/Linux
# EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
# https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
if e.errno not in {
errno.EPIPE,
errno.ESHUTDOWN,
errno.EPROTOTYPE,
}:
raise
if chunked:
conn.request_chunked(method, url, **httplib_request_kw)
else:
conn.request(method, url, **httplib_request_kw)
# Reset the timeout for the recv() on the socket
read_timeout = timeout_obj.read_timeout
......@@ -551,12 +532,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
Data to send in the request body (useful for creating
POST requests, see HTTPConnectionPool.post_url for
more convenience).
:param headers:
Dictionary of custom headers to send, such as User-Agent,
......@@ -586,7 +565,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
consistent else will raise HostChangedError. When False, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
......@@ -623,10 +602,6 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
......@@ -644,7 +619,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if url.startswith("/"):
url = six.ensure_str(_encode_target(url))
else:
url = six.ensure_str(parsed_url.url)
url = six.ensure_str(parse_url(url).url)
conn = None
......@@ -659,14 +634,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
if self.scheme == "http":
headers = headers.copy()
headers.update(self.proxy_headers)
......@@ -692,7 +663,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
is_new_proxy_conn = self.proxy is not None and not getattr(
conn, "sock", None
)
if is_new_proxy_conn and http_tunnel_required:
if is_new_proxy_conn:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
......@@ -866,7 +837,11 @@ class HTTPSConnectionPool(HTTPConnectionPool):
"""
Same as :class:`.HTTPConnectionPool`, but HTTPS.
:class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
When Python is compiled with the :mod:`ssl` module, then
:class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
instead of :class:`.HTTPSConnection`.
:class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
``assert_hostname`` and ``host`` in this order to verify connections.
If ``assert_hostname`` is False, no verification is done.
......@@ -950,22 +925,15 @@ class HTTPSConnectionPool(HTTPConnectionPool):
def _prepare_proxy(self, conn):
"""
Establishes a tunnel connection through HTTP CONNECT.
Tunnel connection is established early because otherwise httplib would
improperly set Host: header to proxy's IP:port.
Establish tunnel connection early, because otherwise httplib
would improperly set Host: header to proxy's IP:port.
"""
conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
if self.proxy.scheme == "https":
conn.tls_in_tls_required = True
conn.connect()
def _new_conn(self):
"""
Return a fresh :class:`http.client.HTTPSConnection`.
Return a fresh :class:`httplib.HTTPSConnection`.
"""
self.num_connections += 1
log.debug(
......
......@@ -32,24 +32,22 @@ license and by oscrypto's:
from __future__ import absolute_import
import platform
from ctypes.util import find_library
from ctypes import (
CDLL,
CFUNCTYPE,
POINTER,
c_bool,
c_byte,
c_char_p,
c_void_p,
c_int32,
c_long,
c_char_p,
c_size_t,
c_byte,
c_uint32,
c_ulong,
c_void_p,
c_long,
c_bool,
)
from ctypes.util import find_library
from ctypes import CDLL, POINTER, CFUNCTYPE
from urllib3.packages.six import raise_from
if platform.system() != "Darwin":
raise ImportError("Only macOS is supported")
......@@ -295,13 +293,6 @@ try:
Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMax.restype = OSStatus
try:
Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetALPNProtocols.restype = OSStatus
except AttributeError:
# Supported only in 10.12+
pass
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
......
......@@ -10,13 +10,13 @@ appropriate and useful assistance to the higher-level code.
import base64
import ctypes
import itertools
import os
import re
import os
import ssl
import struct
import tempfile
from .bindings import CFConst, CoreFoundation, Security
from .bindings import Security, CoreFoundation, CFConst
# This regular expression is used to grab PEM data out of a PEM bundle.
_PEM_CERTS_RE = re.compile(
......@@ -56,51 +56,6 @@ def _cf_dictionary_from_tuples(tuples):
)
def _cfstr(py_bstr):
"""
Given Python binary data, create a CFString.
The string must be CFReleased by the caller.
"""
c_str = ctypes.c_char_p(py_bstr)
cf_str = CoreFoundation.CFStringCreateWithCString(
CoreFoundation.kCFAllocatorDefault,
c_str,
CFConst.kCFStringEncodingUTF8,
)
return cf_str
def _create_cfstring_array(lst):
"""
Given a list of Python binary data, create an associated CFMutableArray.
The array must be CFReleased by the caller.
Raises an ssl.SSLError on failure.
"""
cf_arr = None
try:
cf_arr = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
if not cf_arr:
raise MemoryError("Unable to allocate memory!")
for item in lst:
cf_str = _cfstr(item)
if not cf_str:
raise MemoryError("Unable to allocate memory!")
try:
CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
finally:
CoreFoundation.CFRelease(cf_str)
except BaseException as e:
if cf_arr:
CoreFoundation.CFRelease(cf_arr)
raise ssl.SSLError("Unable to allocate array: %s" % (e,))
return cf_arr
def _cf_string_to_unicode(value):
"""
Creates a Unicode string from a CFString object. Used entirely for error
......@@ -371,26 +326,3 @@ def _load_client_cert_chain(keychain, *paths):
finally:
for obj in itertools.chain(identities, certificates):
CoreFoundation.CFRelease(obj)
TLS_PROTOCOL_VERSIONS = {
"SSLv2": (0, 2),
"SSLv3": (3, 0),
"TLSv1": (3, 1),
"TLSv1.1": (3, 2),
"TLSv1.2": (3, 3),
}
def _build_tls_unknown_ca_alert(version):
"""
Builds a TLS alert record for an unknown CA.
"""
ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
severity_fatal = 0x02
description_unknown_ca = 0x30
msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
msg_len = len(msg)
record_type_alert = 0x15
record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
return record
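As a quick illustration (not part of urllib3): for "TLSv1.2" the helper above produces a fatal (0x02) unknown_ca (0x30) alert framed in a two-byte alert record.

# Illustrative sanity check of the record layout built by the helper above.
record = _build_tls_unknown_ca_alert("TLSv1.2")
assert record == b"\x15\x03\x03\x00\x02\x02\x30"  # alert (0x15), TLS 1.2 (3, 3), length 2, fatal, unknown_ca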
......@@ -39,24 +39,24 @@ urllib3 on Google App Engine:
"""
from __future__ import absolute_import
import io
import logging
import warnings
from ..packages.six.moves.urllib.parse import urljoin
from ..exceptions import (
HTTPError,
HTTPWarning,
MaxRetryError,
ProtocolError,
SSLError,
TimeoutError,
SSLError,
)
from ..packages.six.moves.urllib.parse import urljoin
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.retry import Retry
from ..util.timeout import Timeout
from ..util.retry import Retry
from . import _appengine_environ
try:
......@@ -90,7 +90,7 @@ class AppEngineManager(RequestMethods):
* If you attempt to use this on App Engine Flexible, as full socket
support is available.
* If a request size is more than 10 megabytes.
* If a response size is more than 32 megabytes.
* If a response size is more than 32 megabtyes.
* If you use an unsupported request method such as OPTIONS.
Beyond those cases, it will raise normal urllib3 errors.
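For illustration, a minimal sketch of the intended usage on classic App Engine (the fallback to PoolManager outside the sandbox is an assumption for local runs; the URL is hypothetical):

import urllib3
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

# Use URLFetch-backed requests only inside the App Engine sandbox.
http = AppEngineManager() if is_appengine_sandbox() else urllib3.PoolManager()
r = http.request("GET", "https://example.com/")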
......
......@@ -6,12 +6,12 @@ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
from __future__ import absolute_import
from logging import getLogger
from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
log = getLogger(__name__)
......
"""
TLS with SNI_-support for Python 2. Follow these instructions if you would
like to verify TLS certificates in Python 2. Note, the default libraries do
SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.
This needs the following packages installed:
* `pyOpenSSL`_ (tested with 16.0.0)
* `cryptography`_ (minimum 1.3.4, from pyopenssl)
* `idna`_ (minimum 2.0, from cryptography)
* pyOpenSSL (tested with 16.0.0)
* cryptography (minimum 1.3.4, from pyopenssl)
* idna (minimum 2.0, from cryptography)
However, pyopenssl depends on cryptography, which depends on idna, so while we
use all three directly here we end up having relatively few packages required.
You can install them with the following command:
.. code-block:: bash
$ python -m pip install pyopenssl cryptography idna
pip install pyopenssl cryptography idna
To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this:
.. code-block:: python
like this::
try:
import urllib3.contrib.pyopenssl
......@@ -39,11 +35,11 @@ when the required modules are installed.
Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).
If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
.. _pyopenssl: https://www.pyopenssl.org
.. _cryptography: https://cryptography.io
.. _idna: https://github.com/kjd/idna
"""
from __future__ import absolute_import
......@@ -60,9 +56,8 @@ except ImportError:
pass
from socket import timeout, error as SocketError
from io import BytesIO
from socket import error as SocketError
from socket import timeout
try: # Platform-specific: Python 2
from socket import _fileobject
......@@ -72,10 +67,11 @@ except ImportError: # Platform-specific: Python 3
import logging
import ssl
from ..packages import six
import sys
from .. import util
from ..packages import six
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
......@@ -469,10 +465,6 @@ class PyOpenSSLContext(object):
self._ctx.set_passwd_cb(lambda *_: password)
self._ctx.use_privatekey_file(keyfile or certfile)
def set_alpn_protocols(self, protocols):
protocols = [six.ensure_binary(p) for p in protocols]
return self._ctx.set_alpn_protos(protocols)
def wrap_socket(
self,
sock,
......
......@@ -29,8 +29,6 @@ library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:
.. code-block::
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
Permission is hereby granted, free of charge, to any person obtaining a
......@@ -60,21 +58,16 @@ import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
import six
from .. import util
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
from ._securetransport.bindings import Security, SecurityConst, CoreFoundation
from ._securetransport.low_level import (
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
_load_client_cert_chain,
)
try: # Platform-specific: Python 2
......@@ -381,55 +374,16 @@ class WrappedSocket(object):
)
_assert_no_error(result)
def _set_alpn_protocols(self, protocols):
"""
Sets up the ALPN protocols on the context.
"""
if not protocols:
return
protocols_arr = _create_cfstring_array(protocols)
try:
result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
_assert_no_error(result)
finally:
CoreFoundation.CFRelease(protocols_arr)
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
Raises an SSLError if the connection is not trusted.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
successes = (
SecurityConst.kSecTrustResultUnspecified,
SecurityConst.kSecTrustResultProceed,
)
try:
trust_result = self._evaluate_trust(trust_bundle)
if trust_result in successes:
return
reason = "error code: %d" % (trust_result,)
except Exception as e:
# Do not trust on error
reason = "exception: %r" % (e,)
# SecureTransport does not send an alert nor shut down the connection itself, so do both here.
rec = _build_tls_unknown_ca_alert(self.version())
self.socket.sendall(rec)
# close the connection immediately
# l_onoff = 1, activate linger
# l_linger = 0, linger for 0 seconds
opts = struct.pack("ii", 1, 0)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
self.close()
raise ssl.SSLError("certificate verify failed, %s" % reason)
def _evaluate_trust(self, trust_bundle):
# We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
with open(trust_bundle, "rb") as f:
......@@ -467,7 +421,15 @@ class WrappedSocket(object):
if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
return trust_result.value
# Ok, now we can look at what the result was.
successes = (
SecurityConst.kSecTrustResultUnspecified,
SecurityConst.kSecTrustResultProceed,
)
if trust_result.value not in successes:
raise ssl.SSLError(
"certificate verify failed, error code: %d" % trust_result.value
)
def handshake(
self,
......@@ -479,7 +441,6 @@ class WrappedSocket(object):
client_cert,
client_key,
client_key_passphrase,
alpn_protocols,
):
"""
Actually performs the TLS handshake. This is run automatically by
......@@ -520,9 +481,6 @@ class WrappedSocket(object):
# Setup the ciphers.
self._set_ciphers()
# Setup the ALPN protocols.
self._set_alpn_protocols(alpn_protocols)
# Set the minimum and maximum TLS versions.
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
_assert_no_error(result)
......@@ -796,7 +754,6 @@ class SecureTransportContext(object):
self._client_cert = None
self._client_key = None
self._client_key_passphrase = None
self._alpn_protocols = None
@property
def check_hostname(self):
......@@ -874,18 +831,6 @@ class SecureTransportContext(object):
self._client_key = keyfile
self._client_cert_passphrase = password
def set_alpn_protocols(self, protocols):
"""
Sets the ALPN protocols that will later be set on the context.
Raises a NotImplementedError if ALPN is not supported.
"""
if not hasattr(Security, "SSLSetALPNProtocols"):
raise NotImplementedError(
"SecureTransport supports ALPN only in macOS 10.12+"
)
self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
def wrap_socket(
self,
sock,
......@@ -915,6 +860,5 @@ class SecureTransportContext(object):
self._client_cert,
self._client_key,
self._client_key_passphrase,
self._alpn_protocols,
)
return wrapped_socket
......@@ -14,26 +14,22 @@ supports the following SOCKS features:
- SOCKS5 with local DNS (``proxy_url='socks5://...``)
- Usernames and passwords for the SOCKS proxy
.. note::
It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
your ``proxy_url`` to ensure that DNS resolution is done from the remote
server instead of client-side when connecting to a domain name.
.. note::
It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
your ``proxy_url`` to ensure that DNS resolution is done from the remote
server instead of client-side when connecting to a domain name.
SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
supports IPv4, IPv6, and domain names.
When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
will be sent as the ``userid`` section of the SOCKS request:
.. code-block:: python
will be sent as the ``userid`` section of the SOCKS request::
proxy_url="socks4a://<userid>@proxy-host"
When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
of the ``proxy_url`` will be sent as the username/password to authenticate
with the proxy:
.. code-block:: python
with the proxy::
proxy_url="socks5h://<username>:<password>@proxy-host"
......@@ -44,7 +40,6 @@ try:
import socks
except ImportError:
import warnings
from ..exceptions import DependencyWarning
warnings.warn(
......@@ -57,8 +52,7 @@ except ImportError:
)
raise
from socket import error as SocketError
from socket import timeout as SocketTimeout
from socket import error as SocketError, timeout as SocketTimeout
from ..connection import HTTPConnection, HTTPSConnection
from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
......
from __future__ import absolute_import
from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
# Base Exceptions
class HTTPError(Exception):
"""Base exception used by this module."""
"Base exception used by this module."
pass
class HTTPWarning(Warning):
"""Base warning used by this module."""
"Base warning used by this module."
pass
class PoolError(HTTPError):
"""Base exception for errors caused within a pool."""
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
self.pool = pool
......@@ -30,7 +27,7 @@ class PoolError(HTTPError):
class RequestError(PoolError):
"""Base exception for PoolErrors that have associated URLs."""
"Base exception for PoolErrors that have associated URLs."
def __init__(self, pool, url, message):
self.url = url
......@@ -42,13 +39,12 @@ class RequestError(PoolError):
class SSLError(HTTPError):
"""Raised when SSL certificate fails in an HTTPS connection."""
"Raised when SSL certificate fails in an HTTPS connection."
pass
class ProxyError(HTTPError):
"""Raised when the connection to a proxy fails."""
"Raised when the connection to a proxy fails."
def __init__(self, message, error, *args):
super(ProxyError, self).__init__(message, error, *args)
......@@ -56,14 +52,12 @@ class ProxyError(HTTPError):
class DecodeError(HTTPError):
"""Raised when automatic decoding based on Content-Type fails."""
"Raised when automatic decoding based on Content-Type fails."
pass
class ProtocolError(HTTPError):
"""Raised when something unexpected happens mid-request/response."""
"Raised when something unexpected happens mid-request/response."
pass
......@@ -93,7 +87,7 @@ class MaxRetryError(RequestError):
class HostChangedError(RequestError):
"""Raised when an existing pool gets a request for a foreign host."""
"Raised when an existing pool gets a request for a foreign host."
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
......@@ -102,7 +96,7 @@ class HostChangedError(RequestError):
class TimeoutStateError(HTTPError):
"""Raised when passing an invalid state to a timeout"""
""" Raised when passing an invalid state to a timeout """
pass
......@@ -118,45 +112,39 @@ class TimeoutError(HTTPError):
class ReadTimeoutError(TimeoutError, RequestError):
"""Raised when a socket timeout occurs while receiving data from a server"""
"Raised when a socket timeout occurs while receiving data from a server"
pass
# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
"""Raised when a socket timeout occurs while connecting to a server"""
"Raised when a socket timeout occurs while connecting to a server"
pass
class NewConnectionError(ConnectTimeoutError, PoolError):
"""Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
"Raised when we fail to establish a new connection. Usually ECONNREFUSED."
pass
class EmptyPoolError(PoolError):
"""Raised when a pool runs out of connections and no more are allowed."""
"Raised when a pool runs out of connections and no more are allowed."
pass
class ClosedPoolError(PoolError):
"""Raised when a request enters a pool after the pool has been closed."""
"Raised when a request enters a pool after the pool has been closed."
pass
class LocationValueError(ValueError, HTTPError):
"""Raised when there is something wrong with a given URL input."""
"Raised when there is something wrong with a given URL input."
pass
class LocationParseError(LocationValueError):
"""Raised when get_host or similar fails to parse the URL input."""
"Raised when get_host or similar fails to parse the URL input."
def __init__(self, location):
message = "Failed to parse: %s" % location
......@@ -165,56 +153,39 @@ class LocationParseError(LocationValueError):
self.location = location
class URLSchemeUnknown(LocationValueError):
"""Raised when a URL input has an unsupported scheme."""
def __init__(self, scheme):
message = "Not supported URL scheme %s" % scheme
super(URLSchemeUnknown, self).__init__(message)
self.scheme = scheme
class ResponseError(HTTPError):
"""Used as a container for an error reason supplied in a MaxRetryError."""
"Used as a container for an error reason supplied in a MaxRetryError."
GENERIC_ERROR = "too many error responses"
SPECIFIC_ERROR = "too many {status_code} error responses"
class SecurityWarning(HTTPWarning):
"""Warned when performing security reducing actions"""
"Warned when performing security reducing actions"
pass
class SubjectAltNameWarning(SecurityWarning):
"""Warned when connecting to a host with a certificate missing a SAN."""
"Warned when connecting to a host with a certificate missing a SAN."
pass
class InsecureRequestWarning(SecurityWarning):
"""Warned when making an unverified HTTPS request."""
"Warned when making an unverified HTTPS request."
pass
class SystemTimeWarning(SecurityWarning):
"""Warned when system time is suspected to be wrong"""
"Warned when system time is suspected to be wrong"
pass
class InsecurePlatformWarning(SecurityWarning):
"""Warned when certain TLS/SSL configuration is not available on a platform."""
"Warned when certain SSL configuration is not available on a platform."
pass
class SNIMissingWarning(HTTPWarning):
"""Warned when making a HTTPS request without SNI available."""
"Warned when making a HTTPS request without SNI available."
pass
......@@ -227,16 +198,29 @@ class DependencyWarning(HTTPWarning):
pass
class ResponseNotChunked(ProtocolError, ValueError):
"""Response needs to be chunked in order to read it as chunks."""
class InvalidProxyConfigurationWarning(HTTPWarning):
"""
Warned when using an HTTPS proxy and an HTTPS URL. Currently
urllib3 doesn't support HTTPS proxies and the proxy will be
contacted via HTTP instead. This warning can be fixed by
changing your HTTPS proxy URL into an HTTP proxy URL.
If you encounter this warning read this:
https://github.com/urllib3/urllib3/issues/1850
"""
pass
class ResponseNotChunked(ProtocolError, ValueError):
"Response needs to be chunked in order to read it as chunks."
pass
class BodyNotHttplibCompatible(HTTPError):
"""
Body should be :class:`http.client.HTTPResponse` like
(have an fp attribute which returns raw chunks) for read_chunked().
Body should be httplib.HTTPResponse like (have an fp attribute which
returns raw chunks) for read_chunked().
"""
pass
......@@ -246,8 +230,9 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead):
"""
Response length doesn't match expected Content-Length
Subclass of :class:`http.client.IncompleteRead` to allow int value
for ``partial`` to avoid creating large objects on streamed reads.
Subclass of http_client.IncompleteRead to allow int value
for `partial` to avoid creating large objects on streamed
reads.
"""
def __init__(self, partial, expected):
......@@ -260,57 +245,22 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead):
)
class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
"""Invalid chunk length in a chunked response."""
def __init__(self, response, length):
super(InvalidChunkLength, self).__init__(
response.tell(), response.length_remaining
)
self.response = response
self.length = length
def __repr__(self):
return "InvalidChunkLength(got length %r, %i bytes read)" % (
self.length,
self.partial,
)
class InvalidHeader(HTTPError):
"""The header provided was somehow invalid."""
"The header provided was somehow invalid."
pass
class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
"""ProxyManager does not support the supplied scheme"""
class ProxySchemeUnknown(AssertionError, ValueError):
"ProxyManager does not support the supplied scheme"
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
def __init__(self, scheme):
# 'localhost' is here because our URL parser parses
# localhost:8080 -> scheme=localhost, remove if we fix this.
if scheme == "localhost":
scheme = None
if scheme is None:
message = "Proxy URL had no scheme, should start with http:// or https://"
else:
message = (
"Proxy URL had unsupported scheme %s, should use http:// or https://"
% scheme
)
message = "Not supported proxy scheme %s" % scheme
super(ProxySchemeUnknown, self).__init__(message)
class ProxySchemeUnsupported(ValueError):
"""Fetching HTTPS resources through HTTPS proxies is unsupported"""
pass
class HeaderParsingError(HTTPError):
"""Raised by assert_header_parsing, but we convert it to a log.warning statement."""
"Raised by assert_header_parsing, but we convert it to a log.warning statement."
def __init__(self, defects, unparsed_data):
message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
......@@ -318,6 +268,5 @@ class HeaderParsingError(HTTPError):
class UnrewindableBodyError(HTTPError):
"""urllib3 encountered an error when trying to rewind a body"""
"urllib3 encountered an error when trying to rewind a body"
pass
from __future__ import absolute_import
import email.utils
import mimetypes
import re
......@@ -27,8 +26,7 @@ def format_header_param_rfc2231(name, value):
strategy defined in RFC 2231.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows
`RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
non-ASCII values, like file names. This follows RFC 2388 Section 4.4.
:param name:
The name of the parameter, a string expected to be ASCII only.
......@@ -67,6 +65,7 @@ _HTML5_REPLACEMENTS = {
u"\u0022": u"%22",
# Replace "\" with "\\".
u"\u005C": u"\u005C\u005C",
u"\u005C": u"\u005C\u005C",
}
# All control characters from 0x00 to 0x1F *except* 0x1B.
......
from __future__ import absolute_import
import binascii
import codecs
import os
from io import BytesIO
from .fields import RequestField
from .packages import six
from .packages.six import b
from .fields import RequestField
writer = codecs.lookup("utf-8")[3]
......
......@@ -7,6 +7,7 @@ Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
"""
import io
from socket import SocketIO
......
......@@ -10,13 +10,10 @@ try:
except ImportError:
try:
# Backport of the function from a pypi module
from backports.ssl_match_hostname import ( # type: ignore
CertificateError,
match_hostname,
)
from backports.ssl_match_hostname import CertificateError, match_hostname
except ImportError:
# Our vendored copy
from ._implementation import CertificateError, match_hostname # type: ignore
from ._implementation import CertificateError, match_hostname
# Not needed, but documenting what we provide.
__all__ = ("CertificateError", "match_hostname")
from __future__ import absolute_import
import collections
import functools
import logging
import warnings
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import (
LocationValueError,
MaxRetryError,
ProxySchemeUnknown,
ProxySchemeUnsupported,
URLSchemeUnknown,
InvalidProxyConfigurationWarning,
)
from .packages import six
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
from .util.proxy import connection_requires_http_tunnel
from .util.retry import Retry
from .util.url import parse_url
from .util.retry import Retry
__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
......@@ -59,7 +59,6 @@ _key_fields = (
"key_headers", # dict
"key__proxy", # parsed proxy url
"key__proxy_headers", # dict
"key__proxy_config", # class
"key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
"key__socks_options", # dict
"key_assert_hostname", # bool or string
......@@ -71,9 +70,6 @@ _key_fields = (
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple("PoolKey", _key_fields)
_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
def _default_key_normalizer(key_class, request_context):
"""
......@@ -165,7 +161,6 @@ class PoolManager(RequestMethods):
"""
proxy = None
proxy_config = None
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
......@@ -187,7 +182,7 @@ class PoolManager(RequestMethods):
def _new_pool(self, scheme, host, port, request_context=None):
"""
Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
Create a new :class:`ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
......@@ -223,7 +218,7 @@ class PoolManager(RequestMethods):
def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
Get a :class:`ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
......@@ -246,22 +241,20 @@ class PoolManager(RequestMethods):
def connection_from_context(self, request_context):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
Get a :class:`ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
scheme = request_context["scheme"].lower()
pool_key_constructor = self.key_fn_by_scheme.get(scheme)
if not pool_key_constructor:
raise URLSchemeUnknown(scheme)
pool_key_constructor = self.key_fn_by_scheme[scheme]
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
def connection_from_pool_key(self, pool_key, request_context=None):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
Get a :class:`ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
......@@ -319,39 +312,9 @@ class PoolManager(RequestMethods):
base_pool_kwargs[key] = value
return base_pool_kwargs
def _proxy_requires_url_absolute_form(self, parsed_url):
"""
Indicates if the proxy requires the complete destination URL in the
request. Normally this is only needed when not using an HTTP CONNECT
tunnel.
"""
if self.proxy is None:
return False
return not connection_requires_http_tunnel(
self.proxy, self.proxy_config, parsed_url.scheme
)
def _validate_proxy_scheme_url_selection(self, url_scheme):
"""
Validates that we're not attempting to do TLS-in-TLS connections on
Python 2 or with unsupported SSL implementations.
"""
if self.proxy is None or url_scheme != "https":
return
if self.proxy.scheme != "https":
return
if six.PY2 and not self.proxy_config.use_forwarding_for_https:
raise ProxySchemeUnsupported(
"Contacting HTTPS destinations through HTTPS proxies "
"'via CONNECT tunnels' is not supported in Python 2"
)
def urlopen(self, method, url, redirect=True, **kw):
"""
Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
......@@ -359,8 +322,6 @@ class PoolManager(RequestMethods):
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
self._validate_proxy_scheme_url_selection(u.scheme)
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw["assert_same_host"] = False
......@@ -369,7 +330,7 @@ class PoolManager(RequestMethods):
if "headers" not in kw:
kw["headers"] = self.headers.copy()
if self._proxy_requires_url_absolute_form(u):
if self.proxy is not None and u.scheme == "http":
response = conn.urlopen(method, url, **kw)
else:
response = conn.urlopen(method, u.request_uri, **kw)
......@@ -431,19 +392,6 @@ class ProxyManager(PoolManager):
HTTPS/CONNECT case they are sent only once. Could be used for proxy
authentication.
:param proxy_ssl_context:
The proxy SSL context is used to establish the TLS connection to the
proxy when using HTTPS proxies.
:param use_forwarding_for_https:
(Defaults to False) If set to True, requests to the HTTPS proxy are
forwarded on behalf of the client instead of creating a TLS
tunnel via the CONNECT method. **Enabling this flag means that request
and response headers and content will be visible from the HTTPS proxy**
whereas tunneling keeps request and response headers and content
private. IP address, target hostname, SNI, and port are always visible
to an HTTPS proxy even when this flag is disabled.
Example:
>>> proxy = urllib3.ProxyManager('http://localhost:3128/')
>>> r1 = proxy.request('GET', 'http://google.com/')
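A hedged sketch of the forwarding mode described by ``use_forwarding_for_https`` above (proxy address is illustrative):

>>> fwd_proxy = urllib3.ProxyManager(
...     'https://localhost:3128/',
...     use_forwarding_for_https=True,
... )
>>> r3 = fwd_proxy.request('GET', 'https://example.com/')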
......@@ -463,8 +411,6 @@ class ProxyManager(PoolManager):
num_pools=10,
headers=None,
proxy_headers=None,
proxy_ssl_context=None,
use_forwarding_for_https=False,
**connection_pool_kw
):
......@@ -475,22 +421,18 @@ class ProxyManager(PoolManager):
proxy_url.port,
)
proxy = parse_url(proxy_url)
if proxy.scheme not in ("http", "https"):
raise ProxySchemeUnknown(proxy.scheme)
if not proxy.port:
port = port_by_scheme.get(proxy.scheme, 80)
proxy = proxy._replace(port=port)
if proxy.scheme not in ("http", "https"):
raise ProxySchemeUnknown(proxy.scheme)
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
self.proxy_ssl_context = proxy_ssl_context
self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
connection_pool_kw["_proxy"] = self.proxy
connection_pool_kw["_proxy_headers"] = self.proxy_headers
connection_pool_kw["_proxy_config"] = self.proxy_config
super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
......@@ -519,13 +461,27 @@ class ProxyManager(PoolManager):
headers_.update(headers)
return headers_
def _validate_proxy_scheme_url_selection(self, url_scheme):
if url_scheme == "https" and self.proxy.scheme == "https":
warnings.warn(
"Your proxy configuration specified an HTTPS scheme for the proxy. "
"Are you sure you want to use HTTPS to contact the proxy? "
"This most likely indicates an error in your configuration. "
"Read this issue for more info: "
"https://github.com/urllib3/urllib3/issues/1850",
InvalidProxyConfigurationWarning,
stacklevel=3,
)
def urlopen(self, method, url, redirect=True, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
u = parse_url(url)
if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
# For connections using HTTP CONNECT, httplib sets the necessary
# headers on the CONNECT to the proxy. If we're not using CONNECT,
# we'll definitely need to set 'Host' at the very least.
self._validate_proxy_scheme_url_selection(u.scheme)
if u.scheme == "http":
# For proxied HTTPS requests, httplib sets the necessary headers
# on the CONNECT to the proxy. For HTTP, we'll definitely
# need to set 'Host' at the very least.
headers = kw.get("headers", self.headers)
kw["headers"] = self._set_proxy_headers(url, headers)
......
......@@ -3,14 +3,15 @@ from __future__ import absolute_import
from .filepost import encode_multipart_formdata
from .packages.six.moves.urllib.parse import urlencode
__all__ = ["RequestMethods"]
class RequestMethods(object):
"""
Convenience mixin for classes who implement a :meth:`urlopen` method, such
as :class:`urllib3.HTTPConnectionPool` and
:class:`urllib3.PoolManager`.
as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
:class:`~urllib3.poolmanager.PoolManager`.
Provides behavior for making common types of HTTP request methods and
decides which type of request field encoding to use.
......@@ -110,9 +111,9 @@ class RequestMethods(object):
the body. This is useful for request methods like POST, PUT, PATCH, etc.
When ``encode_multipart=True`` (default), then
:func:`urllib3.encode_multipart_formdata` is used to encode
:meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
the payload with the appropriate content type. Otherwise
:func:`urllib.parse.urlencode` is used with the
:meth:`urllib.urlencode` is used with the
'application/x-www-form-urlencoded' content type.
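For illustration, a minimal sketch of both encodings (URL is hypothetical):

import urllib3

http = urllib3.PoolManager()
# multipart/form-data (the default), suitable for file uploads.
http.request("POST", "https://example.com/upload",
             fields={"name": "value",
                     "file": ("report.txt", b"contents", "text/plain")})
# application/x-www-form-urlencoded.
http.request("POST", "https://example.com/form",
             fields={"name": "value"}, encode_multipart=False)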
Multipart encoding must be used when posting files, and it's reasonably
......
from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
import logging
import zlib
from contextlib import contextmanager
from socket import error as SocketError
from socket import timeout as SocketTimeout
from socket import error as SocketError
try:
import brotli
......@@ -13,20 +12,19 @@ except ImportError:
brotli = None
from ._collections import HTTPHeaderDict
from .connection import BaseSSLError, HTTPException
from .exceptions import (
BodyNotHttplibCompatible,
DecodeError,
HTTPError,
IncompleteRead,
InvalidChunkLength,
InvalidHeader,
ProtocolError,
DecodeError,
ReadTimeoutError,
ResponseNotChunked,
SSLError,
IncompleteRead,
InvalidHeader,
HTTPError,
)
from .packages import six
from .packages.six import string_types as basestring, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head
log = logging.getLogger(__name__)
......@@ -158,13 +156,13 @@ class HTTPResponse(io.IOBase):
"""
HTTP Response container.
Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed. This
class is also compatible with the Python standard library's :mod:`io`
module, and can hence be treated as a readable object in the context of that
framework.
Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
Extra parameters for behaviour not present in httplib.HTTPResponse:
:param preload_content:
If True, the response's body will be preloaded during construction.
......@@ -174,7 +172,7 @@ class HTTPResponse(io.IOBase):
'content-encoding' header.
:param original_response:
When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
......@@ -234,7 +232,7 @@ class HTTPResponse(io.IOBase):
self.msg = msg
self._request_url = request_url
if body and isinstance(body, (six.string_types, bytes)):
if body and isinstance(body, (basestring, bytes)):
self._body = body
self._pool = pool
......@@ -292,7 +290,7 @@ class HTTPResponse(io.IOBase):
@property
def data(self):
# For backwards-compat with earlier urllib3 0.4 and earlier.
# For backwords-compat with earlier urllib3 0.4 and earlier.
if self._body:
return self._body
......@@ -309,8 +307,8 @@ class HTTPResponse(io.IOBase):
def tell(self):
"""
Obtain the number of bytes pulled over the wire so far. May differ from
the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
if bytes are encoded on the wire (e.g, compressed).
the amount of content returned by :meth:``HTTPResponse.read`` if bytes
are encoded on the wire (e.g, compressed).
"""
return self._fp_bytes_read
......@@ -444,9 +442,10 @@ class HTTPResponse(io.IOBase):
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
if "read operation timed out" not in str(e):
# SSL errors related to framing/MAC get wrapped and reraised here
raise SSLError(e)
if "read operation timed out" not in str(e): # Defensive:
# This shouldn't happen but just in case we're missing an edge
# case, let's avoid swallowing SSL errors.
raise
raise ReadTimeoutError(self._pool, None, "Read timed out.")
......@@ -480,7 +479,7 @@ class HTTPResponse(io.IOBase):
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
:param amt:
......@@ -581,7 +580,7 @@ class HTTPResponse(io.IOBase):
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
"""
Given an :class:`http.client.HTTPResponse` instance ``r``, return a
Given an :class:`httplib.HTTPResponse` instance ``r``, return a
corresponding :class:`urllib3.response.HTTPResponse` object.
Remaining parameters are passed to the HTTPResponse constructor, along
......@@ -590,11 +589,11 @@ class HTTPResponse(io.IOBase):
headers = r.msg
if not isinstance(headers, HTTPHeaderDict):
if six.PY2:
if PY3:
headers = HTTPHeaderDict(headers.items())
else:
# Python 2.7
headers = HTTPHeaderDict.from_httplib(headers)
else:
headers = HTTPHeaderDict(headers.items())
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, "strict", 0)
......@@ -610,7 +609,7 @@ class HTTPResponse(io.IOBase):
)
return resp
# Backwards-compatibility methods for http.client.HTTPResponse
# Backwards-compatibility methods for httplib.HTTPResponse
def getheaders(self):
return self.headers
......@@ -680,8 +679,8 @@ class HTTPResponse(io.IOBase):
def supports_chunked_reads(self):
"""
Checks if the underlying file-like object looks like a
:class:`http.client.HTTPResponse` object. We do this by testing for
the fp attribute. If it is present we assume it returns raw chunks as
httplib.HTTPResponse object. We do this by testing for the fp
attribute. If it is present we assume it returns raw chunks as
processed by read_chunked().
"""
return hasattr(self._fp, "fp")
......@@ -698,7 +697,7 @@ class HTTPResponse(io.IOBase):
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
raise InvalidChunkLength(self, line)
raise httplib.IncompleteRead(line)
def _handle_chunk(self, amt):
returned_chunk = None
......@@ -745,7 +744,7 @@ class HTTPResponse(io.IOBase):
)
if not self.supports_chunked_reads():
raise BodyNotHttplibCompatible(
"Body should be http.client.HTTPResponse like. "
"Body should be httplib.HTTPResponse like. "
"It should have have an fp attribute which returns raw chunks."
)
......
......@@ -2,23 +2,23 @@ from __future__ import absolute_import
# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
from .request import make_headers
from .response import is_fp_closed
from .retry import Retry
from .ssl_ import (
ALPN_PROTOCOLS,
SSLContext,
HAS_SNI,
IS_PYOPENSSL,
IS_SECURETRANSPORT,
PROTOCOL_TLS,
SSLContext,
assert_fingerprint,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
PROTOCOL_TLS,
)
from .timeout import Timeout, current_time
from .url import Url, get_host, parse_url, split_first
from .timeout import current_time, Timeout
from .retry import Retry
from .url import get_host, parse_url, split_first, Url
from .wait import wait_for_read, wait_for_write
__all__ = (
......@@ -27,7 +27,6 @@ __all__ = (
"IS_SECURETRANSPORT",
"SSLContext",
"PROTOCOL_TLS",
"ALPN_PROTOCOLS",
"Retry",
"Timeout",
"Url",
......@@ -44,6 +43,4 @@ __all__ = (
"ssl_wrap_socket",
"wait_for_read",
"wait_for_write",
"SKIP_HEADER",
"SKIPPABLE_HEADERS",
)
from __future__ import absolute_import
import socket
from urllib3.exceptions import LocationParseError
from ..contrib import _appengine_environ
from ..packages import six
from .wait import NoWayToWaitForSocketError, wait_for_read
from ..contrib import _appengine_environ
def is_connection_dropped(conn): # Platform-specific
......@@ -14,7 +9,7 @@ def is_connection_dropped(conn): # Platform-specific
Returns True if the connection is dropped and should be closed.
:param conn:
:class:`http.client.HTTPConnection` object.
:class:`httplib.HTTPConnection` object.
Note: For platforms like AppEngine, this will always return ``False`` to
let the platform handle connection recycling transparently for us.
......@@ -47,7 +42,7 @@ def create_connection(
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`socket.getdefaulttimeout`
global default timeout setting returned by :func:`getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
A host of '' or port 0 tells the OS to use the default.
......@@ -63,13 +58,6 @@ def create_connection(
# The original create_connection function always returns all records.
family = allowed_gai_family()
try:
host.encode("idna")
except UnicodeError:
return six.raise_from(
LocationParseError(u"'%s', label empty or too long" % host), None
)
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
......
from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
def connection_requires_http_tunnel(
proxy_url=None, proxy_config=None, destination_scheme=None
):
"""
Returns True if the connection requires an HTTP CONNECT through the proxy.
:param URL proxy_url:
URL of the proxy.
:param ProxyConfig proxy_config:
Proxy configuration from poolmanager.py
:param str destination_scheme:
The scheme of the destination (e.g. https, http, etc.).
"""
# If we're not using a proxy, no way to use a tunnel.
if proxy_url is None:
return False
# HTTP destinations never require tunneling, we always forward.
if destination_scheme == "http":
return False
# Support for forwarding with HTTPS proxies and HTTPS destinations.
if (
proxy_url.scheme == "https"
and proxy_config
and proxy_config.use_forwarding_for_https
):
return False
# Otherwise always use a tunnel.
return True
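Illustrative decision table for the helper above (the namedtuples stand in for a parsed proxy URL and the poolmanager ProxyConfig; they are not part of urllib3):

from collections import namedtuple

_Proxy = namedtuple("_Proxy", ["scheme"])       # stand-in for util.url.parse_url(...)
_Config = namedtuple("_Config", ["ssl_context", "use_forwarding_for_https"])

assert not connection_requires_http_tunnel(None, None, "https")            # no proxy: go direct
assert not connection_requires_http_tunnel(_Proxy("http"), None, "http")   # HTTP destination: forward
assert connection_requires_http_tunnel(_Proxy("http"), None, "https")      # HTTPS destination: CONNECT
assert not connection_requires_http_tunnel(
    _Proxy("https"), _Config(None, True), "https"                          # HTTPS forwarding opted in
)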
def create_proxy_ssl_context(
ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
):
"""
Generates a default proxy ssl context if one hasn't been provided by the
user.
"""
ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(ssl_version),
cert_reqs=resolve_cert_reqs(cert_reqs),
)
if (
not ca_certs
and not ca_cert_dir
and not ca_cert_data
and hasattr(ssl_context, "load_default_certs")
):
ssl_context.load_default_certs()
return ssl_context
import collections
from ..packages import six
from ..packages.six.moves import queue
......
from __future__ import absolute_import
from base64 import b64encode
from ..exceptions import UnrewindableBodyError
from ..packages.six import b, integer_types
# Pass as a value within ``headers`` to skip
# emitting some HTTP headers that are added automatically.
# The only headers that are supported are ``Accept-Encoding``,
# ``Host``, and ``User-Agent``.
SKIP_HEADER = "@@@SKIP_HEADER@@@"
SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
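For illustration, a minimal sketch of how these sentinels are meant to be used (relies on the newer urllib3 side of this diff, where ``SKIP_HEADER`` is re-exported from ``urllib3.util``; the URL is hypothetical):

import urllib3
from urllib3.util import SKIP_HEADER

http = urllib3.PoolManager()
# Suppress the automatically added User-Agent and Accept-Encoding headers;
# any other header name would raise ValueError in HTTPConnection.putheader().
http.request("GET", "https://example.com/",
             headers={"User-Agent": SKIP_HEADER, "Accept-Encoding": SKIP_HEADER})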
from ..exceptions import UnrewindableBodyError
ACCEPT_ENCODING = "gzip,deflate"
try:
......
from __future__ import absolute_import
from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
from ..packages.six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
from ..packages.six.moves import http_client as httplib
def is_fp_closed(obj):
......@@ -44,7 +42,8 @@ def assert_header_parsing(headers):
Only works on Python 3.
:param http.client.HTTPMessage headers: Headers to verify.
:param headers: Headers to verify.
:type headers: `httplib.HTTPMessage`.
:raises urllib3.exceptions.HeaderParsingError:
If parsing errors are found.
......@@ -67,25 +66,6 @@ def assert_header_parsing(headers):
if isinstance(payload, (bytes, str)):
unparsed_data = payload
if defects:
# httplib is assuming a response body is available
# when parsing headers even when httplib only sends
# header data to parse_headers() This results in
# defects on multipart responses in particular.
# See: https://github.com/urllib3/urllib3/issues/800
# So we ignore the following defects:
# - StartBoundaryNotFoundDefect:
# The claimed start boundary was never found.
# - MultipartInvariantViolationDefect:
# A message claimed to be a multipart but no subparts were found.
defects = [
defect
for defect in defects
if not isinstance(
defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
)
]
if defects or unparsed_data:
raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
......@@ -96,9 +76,8 @@ def is_response_to_head(response):
Checks whether the request of a response has been a HEAD-request.
Handles the quirks of AppEngine.
:param http.client.HTTPResponse response:
Response to check if the originating request
used 'HEAD' as a method.
:param conn:
:type conn: :class:`httplib.HTTPResponse`
"""
# FIXME: Can we do this somehow without accessing private httplib _method?
method = response._method
......
from __future__ import absolute_import
import email
import logging
import re
import time
import warnings
from collections import namedtuple
from itertools import takewhile

from ..exceptions import (
    ConnectTimeoutError,
    InvalidHeader,
    MaxRetryError,
    ProtocolError,
    ProxyError,
    ReadTimeoutError,
    ResponseError,
)
from ..packages import six
log = logging.getLogger(__name__)
......@@ -28,49 +27,6 @@ RequestHistory = namedtuple(
)
# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
_Default = object()
class _RetryMeta(type):
@property
def DEFAULT_METHOD_WHITELIST(cls):
warnings.warn(
"Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
DeprecationWarning,
)
return cls.DEFAULT_ALLOWED_METHODS
@DEFAULT_METHOD_WHITELIST.setter
def DEFAULT_METHOD_WHITELIST(cls, value):
warnings.warn(
"Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
DeprecationWarning,
)
cls.DEFAULT_ALLOWED_METHODS = value
@property
def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
warnings.warn(
"Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
DeprecationWarning,
)
return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
@DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
warnings.warn(
"Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
DeprecationWarning,
)
cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
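The metaclass above exists purely so the old class-level names keep working while emitting a `DeprecationWarning`. A quick hedged check, assuming this vendored module imports as `urllib3.util.retry`:

```python
import warnings
from urllib3.util.retry import Retry  # assumes the 1.26-era vendored module

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    methods = Retry.DEFAULT_METHOD_WHITELIST  # deprecated class-level alias

print(methods == Retry.DEFAULT_ALLOWED_METHODS)                          # -> True
print(any(issubclass(w.category, DeprecationWarning) for w in caught))   # -> True
```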
@six.add_metaclass(_RetryMeta)
class Retry(object):
"""Retry configuration.
......@@ -98,7 +54,8 @@ class Retry(object):
Total number of retries to allow. Takes precedence over other counts.
Set to ``None`` to remove this constraint and fall back on other
counts.
Set to ``0`` to fail on the first retry.
......@@ -139,35 +96,18 @@ class Retry(object):
Set to ``0`` to fail on the first retry of this type.
:param int other:
How many times to retry on other errors.
Other errors are errors that are not connect, read, redirect or status errors.
These errors might be raised after the request was sent to the server, so the
request might have side-effects.
Set to ``0`` to fail on the first retry of this type.
If ``total`` is not set, it's a good idea to set this to 0 to account
for unexpected edge cases and avoid infinite retry loops.
:param iterable allowed_methods:
Set of uppercased HTTP method verbs that we should retry on.
By default, we only retry on methods which are considered to be
idempotent (multiple requests with the same parameters end with the
same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
Set to a ``False`` value to retry on any verb.
.. warning::
Previously this parameter was named ``method_whitelist``; that
usage is deprecated in v1.26.0 and will be removed in v2.0.
:param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ``allowed_methods``
and the response status code is in ``status_forcelist``.
By default, this is disabled with ``None``.
......@@ -208,16 +148,13 @@ class Retry(object):
request.
"""
#: Default methods to be used for ``allowed_methods``
DEFAULT_ALLOWED_METHODS = frozenset(
["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
)
#: Default status codes to be used for ``status_forcelist``
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
#: Default headers to be used for ``remove_headers_on_redirect``
DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
#: Maximum backoff time.
BACKOFF_MAX = 120
......@@ -229,42 +166,20 @@ class Retry(object):
read=None,
redirect=None,
status=None,
other=None,
allowed_methods=_Default,
status_forcelist=None,
backoff_factor=0,
raise_on_redirect=True,
raise_on_status=True,
history=None,
respect_retry_after_header=True,
remove_headers_on_redirect=_Default,
# TODO: Deprecated, remove in v2.0
method_whitelist=_Default,
):
if method_whitelist is not _Default:
if allowed_methods is not _Default:
raise ValueError(
"Using both 'allowed_methods' and "
"'method_whitelist' together is not allowed. "
"Instead only use 'allowed_methods'"
)
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
allowed_methods = method_whitelist
if allowed_methods is _Default:
allowed_methods = self.DEFAULT_ALLOWED_METHODS
if remove_headers_on_redirect is _Default:
remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
self.total = total
self.connect = connect
self.read = read
self.status = status
self.other = other
if redirect is False or total is False:
redirect = 0
......@@ -272,7 +187,7 @@ class Retry(object):
self.redirect = redirect
self.status_forcelist = status_forcelist or set()
self.allowed_methods = allowed_methods
self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect
self.raise_on_status = raise_on_status
......@@ -289,7 +204,7 @@ class Retry(object):
read=self.read,
redirect=self.redirect,
status=self.status,
other=self.other,
status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor,
raise_on_redirect=self.raise_on_redirect,
......@@ -298,23 +213,6 @@ class Retry(object):
remove_headers_on_redirect=self.remove_headers_on_redirect,
respect_retry_after_header=self.respect_retry_after_header,
)
# TODO: If already given in **kw we use what's given to us
# If not given we need to figure out what to pass. We decide
# based on whether our class has the 'method_whitelist' property
# and if so we pass the deprecated 'method_whitelist' otherwise
# we use 'allowed_methods'. Remove in v2.0
if "method_whitelist" not in kw and "allowed_methods" not in kw:
if "method_whitelist" in self.__dict__:
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
params["method_whitelist"] = self.allowed_methods
else:
params["allowed_methods"] = self.allowed_methods
params.update(kw)
return type(self)(**params)
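The `new()` helper shown here is what `increment()` uses to hand back the "one fewer retry left" object. A hedged illustration, assuming the vendored `Retry` is importable:

```python
from urllib3.util.retry import Retry  # assumed import path for the vendored copy

r = Retry(total=3, connect=2, backoff_factor=0.5)
# new() copies every configured option, overriding only what you pass in.
r2 = r.new(total=r.total - 1)
print(r2.total, r2.connect, r2.backoff_factor)  # -> 2 2 0.5
```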
......@@ -428,26 +326,15 @@ class Retry(object):
def _is_method_retryable(self, method):
"""Checks if a given HTTP method should be retried upon, depending if
it is included in the allowed_methods.
"""
# TODO: For now favor if the Retry implementation sets its own method_whitelist
# property outside of our constructor to avoid breaking custom implementations.
if "method_whitelist" in self.__dict__:
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
allowed_methods = self.method_whitelist
else:
allowed_methods = self.allowed_methods
if allowed_methods and method.upper() not in allowed_methods:
return False
return True
def is_retry(self, method, status_code, has_retry_after=False):
"""Is this method/status code retryable? (Based on allowlists and control
"""Is this method/status code retryable? (Based on whitelists and control
variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
......@@ -468,14 +355,7 @@ class Retry(object):
def is_exhausted(self):
""" Are we out of retries? """
retry_counts = (
self.total,
self.connect,
self.read,
self.redirect,
self.status,
self.other,
)
retry_counts = list(filter(None, retry_counts))
if not retry_counts:
return False
......@@ -513,7 +393,6 @@ class Retry(object):
read = self.read
redirect = self.redirect
status_count = self.status
other = self.other
cause = "unknown"
status = None
redirect_location = None
......@@ -532,11 +411,6 @@ class Retry(object):
elif read is not None:
read -= 1
elif error:
# Other retry?
if other is not None:
other -= 1
elif response and response.get_redirect_location():
# Redirect retry?
if redirect is not None:
......@@ -547,7 +421,7 @@ class Retry(object):
else:
# Incrementing because of a server error like a 500 in
# status_forcelist and the given method is in the allowed_methods
cause = ResponseError.GENERIC_ERROR
if response and response.status:
if status_count is not None:
......@@ -565,7 +439,6 @@ class Retry(object):
read=read,
redirect=redirect,
status=status_count,
other=other,
history=history,
)
......@@ -582,20 +455,6 @@ class Retry(object):
"read={self.read}, redirect={self.redirect}, status={self.status})"
).format(cls=type(self), self=self)
def __getattr__(self, item):
if item == "method_whitelist":
# TODO: Remove this deprecated alias in v2.0
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
return self.allowed_methods
try:
return getattr(super(Retry, self), item)
except AttributeError:
return getattr(Retry, item)
# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
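A hedged end-to-end example of the options documented above (the target URL is hypothetical):

```python
import urllib3
from urllib3.util.retry import Retry

# Retry up to 5 times in total, force retries on 429/503 responses, and
# back off exponentially (0.5s, 1s, 2s, ...) between attempts.
retry = Retry(total=5, backoff_factor=0.5, status_forcelist=[429, 503])
http = urllib3.PoolManager(retries=retry)
# resp = http.request("GET", "https://example.com/")  # hypothetical target
```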
from __future__ import absolute_import
import errno
import hmac
import os
import sys
import warnings
from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256

from ..exceptions import (
    InsecurePlatformWarning,
    ProxySchemeUnsupported,
    SNIMissingWarning,
    SSLError,
)
from ..packages import six
from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
SSLContext = None
SSLTransport = None
HAS_SNI = False
IS_PYOPENSSL = False
IS_SECURETRANSPORT = False
ALPN_PROTOCOLS = ["http/1.1"]
# Maps the length of a digest to a possible hash function producing this digest
HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
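A hedged sketch of how a length-keyed map like the one above can be used to match a certificate fingerprint; the helper name is ours, not urllib3's, and `b"test"` merely stands in for DER certificate bytes:

```python
from hashlib import md5, sha1, sha256

HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}

def fingerprint_matches(cert_der, fingerprint):
    """Illustrative helper: pick the digest function from the hex-digest length."""
    digest = fingerprint.replace(":", "").lower()
    hashfunc = HASHFUNC_MAP[len(digest)]
    return hashfunc(cert_der).hexdigest() == digest

print(fingerprint_matches(
    b"test",
    "9F:86:D0:81:88:4C:7D:65:9A:2F:EA:A0:C5:5A:D0:15:"
    "A3:BF:4F:1B:2B:0B:82:2C:D1:5D:6C:15:B0:F0:0A:08",
))  # -> True; 64 hex chars maps to sha256
```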
......@@ -44,21 +39,11 @@ _const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_ba
try: # Test for SSL features
import ssl
from ssl import CERT_REQUIRED, wrap_socket
except ImportError:
pass
try:
    from ssl import HAS_SNI  # Has SNI?
except ImportError:
pass
try:
from .ssltransport import SSLTransport
except ImportError:
pass
try: # Platform-specific: Python 3.6
from ssl import PROTOCOL_TLS
......@@ -73,18 +58,12 @@ except ImportError:
try:
from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
except ImportError:
OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
OP_NO_COMPRESSION = 0x20000
try: # OP_NO_TICKET was added in Python 3.6
from ssl import OP_NO_TICKET
except ImportError:
OP_NO_TICKET = 0x4000
# A secure default.
# Sources for more information on TLS ciphers:
#
......@@ -271,7 +250,7 @@ def create_urllib3_context(
``ssl.CERT_REQUIRED``.
:param options:
Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
:param ciphers:
Which cipher suites to allow the server to select.
:returns:
......@@ -294,11 +273,6 @@ def create_urllib3_context(
# Disable compression to prevent CRIME attacks for OpenSSL 1.0+
# (issue #309)
options |= OP_NO_COMPRESSION
# TLSv1.2 only. Unless set explicitly, do not request tickets.
# This may save some bandwidth on wire, and although the ticket is encrypted,
# there is a risk associated with it being on wire,
# if the server is not rotating its ticketing keys properly.
options |= OP_NO_TICKET
context.options |= options
......@@ -344,7 +318,6 @@ def ssl_wrap_socket(
ca_cert_dir=None,
key_password=None,
ca_cert_data=None,
tls_in_tls=False,
):
"""
All arguments except for server_hostname, ssl_context, and ca_cert_dir have
......@@ -366,8 +339,6 @@ def ssl_wrap_socket(
:param ca_cert_data:
Optional string containing CA certificates in PEM format suitable for
passing as the cadata parameter to SSLContext.load_verify_locations()
:param tls_in_tls:
Use SSLTransport to wrap the existing socket.
"""
context = ssl_context
if context is None:
......@@ -379,8 +350,14 @@ def ssl_wrap_socket(
if ca_certs or ca_cert_dir or ca_cert_data:
try:
context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
except IOError as e:  # Platform-specific: Python 2.7
raise SSLError(e)
# Py33 raises FileNotFoundError which subclasses OSError
# These are not equivalent unless we check the errno attribute
except OSError as e: # Platform-specific: Python 3.3 and beyond
if e.errno == errno.ENOENT:
raise SSLError(e)
raise
elif ssl_context is None and hasattr(context, "load_default_certs"):
# try to load OS default certs; works well on Windows (requires Python 3.4+)
......@@ -398,21 +375,16 @@ def ssl_wrap_socket(
else:
context.load_cert_chain(certfile, keyfile, key_password)
try:
if hasattr(context, "set_alpn_protocols"):
context.set_alpn_protocols(ALPN_PROTOCOLS)
except NotImplementedError:
pass
# If we detect server_hostname is an IP address then the SNI
# extension should not be used according to RFC3546 Section 3.1
use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
# SecureTransport uses server_hostname in certificate verification.
send_sni = (use_sni_hostname and HAS_SNI) or (
IS_SECURETRANSPORT and server_hostname
)
# Do not warn the user if server_hostname is an invalid SNI hostname.
if not HAS_SNI and use_sni_hostname:
warnings.warn(
"An HTTPS request has been made, but the SNI (Server Name "
"Indication) extension to TLS is not available on this platform. "
......@@ -424,13 +396,7 @@ def ssl_wrap_socket(
SNIMissingWarning,
)
if send_sni:
ssl_sock = _ssl_wrap_socket_impl(
sock, context, tls_in_tls, server_hostname=server_hostname
)
else:
ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
return ssl_sock
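An illustrative restatement of the SNI decision above, using the standard-library `ipaddress` module instead of urllib3's `is_ipaddress()` regexes; it is a sketch, not urllib3's own code:

```python
import ipaddress

def should_send_sni(server_hostname, has_sni=True, is_securetransport=False):
    """Sketch of the send_sni decision: send SNI only for real hostnames."""
    if not server_hostname:
        return False
    try:
        ipaddress.ip_address(server_hostname)   # RFC 3546 §3.1: no SNI for IP literals
        use_sni_hostname = False
    except ValueError:
        use_sni_hostname = True
    # SecureTransport uses server_hostname in certificate verification either way.
    return (use_sni_hostname and has_sni) or is_securetransport

print(should_send_sni("example.com"))    # -> True
print(should_send_sni("93.184.216.34"))  # -> False
```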
def is_ipaddress(hostname):
......@@ -455,20 +421,3 @@ def _is_key_file_encrypted(key_file):
return True
return False
def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
if tls_in_tls:
if not SSLTransport:
# Import error, ssl is not available.
raise ProxySchemeUnsupported(
"TLS in TLS requires support for the 'ssl' module"
)
SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
return SSLTransport(sock, ssl_context, server_hostname)
if server_hostname:
return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
else:
return ssl_context.wrap_socket(sock)
import io
import socket
import ssl
from urllib3.exceptions import ProxySchemeUnsupported
from urllib3.packages import six
SSL_BLOCKSIZE = 16384
class SSLTransport:
"""
The SSLTransport wraps an existing socket and establishes an SSL connection.
Contrary to Python's implementation of SSLSocket, it allows you to chain
multiple TLS connections together. It's particularly useful if you need to
implement TLS within TLS.
The class supports most of the socket API operations.
"""
@staticmethod
def _validate_ssl_context_for_tls_in_tls(ssl_context):
"""
Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
for TLS in TLS.
The only requirement is that the ssl_context provides the 'wrap_bio'
method.
"""
if not hasattr(ssl_context, "wrap_bio"):
if six.PY2:
raise ProxySchemeUnsupported(
"TLS in TLS requires SSLContext.wrap_bio() which isn't "
"supported on Python 2"
)
else:
raise ProxySchemeUnsupported(
"TLS in TLS requires SSLContext.wrap_bio() which isn't "
"available on non-native SSLContext"
)
def __init__(
self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
):
"""
Create an SSLTransport around socket using the provided ssl_context.
"""
self.incoming = ssl.MemoryBIO()
self.outgoing = ssl.MemoryBIO()
self.suppress_ragged_eofs = suppress_ragged_eofs
self.socket = socket
self.sslobj = ssl_context.wrap_bio(
self.incoming, self.outgoing, server_hostname=server_hostname
)
# Perform initial handshake.
self._ssl_io_loop(self.sslobj.do_handshake)
def __enter__(self):
return self
def __exit__(self, *_):
self.close()
def fileno(self):
return self.socket.fileno()
def read(self, len=1024, buffer=None):
return self._wrap_ssl_read(len, buffer)
def recv(self, len=1024, flags=0):
if flags != 0:
raise ValueError("non-zero flags not allowed in calls to recv")
return self._wrap_ssl_read(len)
def recv_into(self, buffer, nbytes=None, flags=0):
if flags != 0:
raise ValueError("non-zero flags not allowed in calls to recv_into")
if buffer and (nbytes is None):
nbytes = len(buffer)
elif nbytes is None:
nbytes = 1024
return self.read(nbytes, buffer)
def sendall(self, data, flags=0):
if flags != 0:
raise ValueError("non-zero flags not allowed in calls to sendall")
count = 0
with memoryview(data) as view, view.cast("B") as byte_view:
amount = len(byte_view)
while count < amount:
v = self.send(byte_view[count:])
count += v
def send(self, data, flags=0):
if flags != 0:
raise ValueError("non-zero flags not allowed in calls to send")
response = self._ssl_io_loop(self.sslobj.write, data)
return response
def makefile(
self, mode="r", buffering=None, encoding=None, errors=None, newline=None
):
"""
Python's http.client uses makefile() and buffered I/O when reading HTTP
messages, and we need to support it.
This is unfortunately a copy and paste of socket.py makefile with small
changes to point to the socket directly.
"""
if not set(mode) <= {"r", "w", "b"}:
raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = socket.SocketIO(self, rawmode)
self.socket._io_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text
def unwrap(self):
self._ssl_io_loop(self.sslobj.unwrap)
def close(self):
self.socket.close()
def getpeercert(self, binary_form=False):
return self.sslobj.getpeercert(binary_form)
def version(self):
return self.sslobj.version()
def cipher(self):
return self.sslobj.cipher()
def selected_alpn_protocol(self):
return self.sslobj.selected_alpn_protocol()
def selected_npn_protocol(self):
return self.sslobj.selected_npn_protocol()
def shared_ciphers(self):
return self.sslobj.shared_ciphers()
def compression(self):
return self.sslobj.compression()
def settimeout(self, value):
self.socket.settimeout(value)
def gettimeout(self):
return self.socket.gettimeout()
def _decref_socketios(self):
self.socket._decref_socketios()
def _wrap_ssl_read(self, len, buffer=None):
try:
return self._ssl_io_loop(self.sslobj.read, len, buffer)
except ssl.SSLError as e:
if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
return 0 # eof, return 0.
else:
raise
def _ssl_io_loop(self, func, *args):
""" Performs an I/O loop between incoming/outgoing and the socket."""
should_loop = True
ret = None
while should_loop:
errno = None
try:
ret = func(*args)
except ssl.SSLError as e:
if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
# WANT_READ, and WANT_WRITE are expected, others are not.
raise e
errno = e.errno
buf = self.outgoing.read()
self.socket.sendall(buf)
if errno is None:
should_loop = False
elif errno == ssl.SSL_ERROR_WANT_READ:
buf = self.socket.recv(SSL_BLOCKSIZE)
if buf:
self.incoming.write(buf)
else:
self.incoming.write_eof()
return ret
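For readers unfamiliar with the `ssl.MemoryBIO` plumbing that `_ssl_io_loop` shuttles data through, a tiny standard-library illustration of just the buffer mechanics (no handshake involved):

```python
import ssl

incoming, outgoing = ssl.MemoryBIO(), ssl.MemoryBIO()

# Bytes received from the raw socket would be fed into `incoming` ...
incoming.write(b"\x16\x03\x01...")  # pretend TLS record bytes
print(incoming.pending)             # -> number of buffered bytes
print(incoming.read())              # the SSLObject would consume these

# ... while anything the SSLObject wants to transmit shows up in `outgoing`,
# ready to be flushed with socket.sendall(), exactly as the loop above does.
print(outgoing.pending)             # -> 0, nothing queued yet
incoming.write_eof()                # signals EOF, like the loop's else-branch
```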
from __future__ import absolute_import
import time
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
from ..exceptions import TimeoutStateError
......@@ -20,26 +19,20 @@ current_time = getattr(time, "monotonic", time.time)
class Timeout(object):
"""Timeout configuration.
Timeouts can be defined as a default for a pool:

.. code-block:: python

    timeout = Timeout(connect=2.0, read=7.0)
    http = PoolManager(timeout=timeout)
    response = http.request('GET', 'http://example.com/')

Or per-request (which overrides the default for the pool):

.. code-block:: python

    response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

Timeouts can be disabled by setting all the parameters to ``None``:

.. code-block:: python

    no_timeout = Timeout(connect=None, read=None)
    response = http.request('GET', 'http://example.com/', timeout=no_timeout)
:param total:
......@@ -50,7 +43,7 @@ class Timeout(object):
Defaults to None.
:type total: int, float, or None
:param connect:
The maximum amount of time (in seconds) to wait for a connection
......@@ -60,7 +53,7 @@ class Timeout(object):
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout for connection attempts.
:type connect: int, float, or None
:param read:
The maximum amount of time (in seconds) to wait between consecutive
......@@ -70,7 +63,7 @@ class Timeout(object):
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout.
:type read: int, float, or None
.. note::
......
from __future__ import absolute_import
import re
from collections import namedtuple
from ..exceptions import LocationParseError
from ..packages import six
url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
# We only want to normalize urls with an HTTP(S) scheme.
......
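As a hedged illustration of the Url structure implied by `url_attrs` above, assuming the vendored urllib3 exposes `parse_url` from this module:

```python
from urllib3.util.url import parse_url  # assumed import path for the vendored copy

u = parse_url("https://user:pw@example.com:8443/a/b?x=1#frag")
print(u.scheme, u.auth, u.host, u.port, u.path, u.query, u.fragment)
# -> https user:pw example.com 8443 /a/b x=1 frag
print(u.url)  # reassembles the components into a URL string
```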
import errno
import select
import sys
from functools import partial
try:
from time import monotonic
......