diff --git a/.gitignore b/.gitignore
index d4c1ea9a..35805828 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,6 +37,12 @@ tests/failed.db
*.sw?
Session.vim
.ropeproject/*
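+# IDE project files #
+######################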
+*.iml
+.idea
+*.ipr
+.settings/*
# OS generated files #
######################
@@ -48,7 +52,6 @@ ehthumbs.db
Thumbs.db
.directory
*~
-/.idea/
*.torrent
# Unrar Executable #
diff --git a/SickBeard.py b/SickBeard.py
index 8ff445f4..8f0f0de5 100755
--- a/SickBeard.py
+++ b/SickBeard.py
@@ -50,6 +50,12 @@ sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib'
if sys.hexversion >= 0x020600F0:
from multiprocessing import freeze_support # @UnresolvedImport
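+# PEP 476: Python 2.7.9+ verifies HTTPS certificates by default; fall back to
+# the pre-2.7.9 unverified context so hosts with self-signed certs stay reachable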
+if sys.version_info >= (2, 7, 9):
+ import ssl
+ ssl._create_default_https_context = ssl._create_unverified_context
+
import locale
import datetime
import threading
diff --git a/gui/slick/css/country-flags.css b/gui/slick/css/country-flags.css
index 8c116dca..44ca6e19 100644
--- a/gui/slick/css/country-flags.css
+++ b/gui/slick/css/country-flags.css
@@ -1,257 +1,257 @@
-.country-flag {
- width: 16px;
- height: 11px;
- background:url(images/country-flags.png) no-repeat
-}
-
-.country-flag.flag-ad {background-position: -16px 0}
-.country-flag.flag-ae {background-position: -32px 0}
-.country-flag.flag-af {background-position: -48px 0}
-.country-flag.flag-ag {background-position: -64px 0}
-.country-flag.flag-ai {background-position: -80px 0}
-.country-flag.flag-al {background-position: -96px 0}
-.country-flag.flag-am {background-position: -112px 0}
-.country-flag.flag-an {background-position: -128px 0}
-.country-flag.flag-ao {background-position: -144px 0}
-.country-flag.flag-ar {background-position: -160px 0}
-.country-flag.flag-as {background-position: -176px 0}
-.country-flag.flag-at {background-position: -192px 0}
-.country-flag.flag-au {background-position: -208px 0}
-.country-flag.flag-aw {background-position: -224px 0}
-.country-flag.flag-az {background-position: -240px 0}
-.country-flag.flag-ba {background-position: 0 -11px}
-.country-flag.flag-bb {background-position: -16px -11px}
-.country-flag.flag-bd {background-position: -32px -11px}
-.country-flag.flag-be {background-position: -48px -11px}
-.country-flag.flag-bf {background-position: -64px -11px}
-.country-flag.flag-bg {background-position: -80px -11px}
-.country-flag.flag-bh {background-position: -96px -11px}
-.country-flag.flag-bi {background-position: -112px -11px}
-.country-flag.flag-bj {background-position: -128px -11px}
-.country-flag.flag-bm {background-position: -144px -11px}
-.country-flag.flag-bn {background-position: -160px -11px}
-.country-flag.flag-bo {background-position: -176px -11px}
-.country-flag.flag-br {background-position: -192px -11px}
-.country-flag.flag-bs {background-position: -208px -11px}
-.country-flag.flag-bt {background-position: -224px -11px}
-.country-flag.flag-bv {background-position: -240px -11px}
-.country-flag.flag-bw {background-position: 0 -22px}
-.country-flag.flag-by {background-position: -16px -22px}
-.country-flag.flag-bz {background-position: -32px -22px}
-.country-flag.flag-ca {background-position: -48px -22px}
-.country-flag.flag-catalonia {background-position: -64px -22px}
-.country-flag.flag-cd {background-position: -80px -22px}
-.country-flag.flag-cf {background-position: -96px -22px}
-.country-flag.flag-cg {background-position: -112px -22px}
-.country-flag.flag-ch {background-position: -128px -22px}
-.country-flag.flag-ci {background-position: -144px -22px}
-.country-flag.flag-ck {background-position: -160px -22px}
-.country-flag.flag-cl {background-position: -176px -22px}
-.country-flag.flag-cm {background-position: -192px -22px}
-.country-flag.flag-cn {background-position: -208px -22px}
-.country-flag.flag-co {background-position: -224px -22px}
-.country-flag.flag-cr {background-position: -240px -22px}
-.country-flag.flag-cu {background-position: 0 -33px}
-.country-flag.flag-cv {background-position: -16px -33px}
-.country-flag.flag-cw {background-position: -32px -33px}
-.country-flag.flag-cy {background-position: -48px -33px}
-.country-flag.flag-cz {background-position: -64px -33px}
-.country-flag.flag-de {background-position: -80px -33px}
-.country-flag.flag-dj {background-position: -96px -33px}
-.country-flag.flag-dk {background-position: -112px -33px}
-.country-flag.flag-dm {background-position: -128px -33px}
-.country-flag.flag-do {background-position: -144px -33px}
-.country-flag.flag-dz {background-position: -160px -33px}
-.country-flag.flag-ec {background-position: -176px -33px}
-.country-flag.flag-ee {background-position: -192px -33px}
-.country-flag.flag-eg {background-position: -208px -33px}
-.country-flag.flag-eh {background-position: -224px -33px}
-.country-flag.flag-england {background-position: -240px -33px}
-.country-flag.flag-er {background-position: 0 -44px}
-.country-flag.flag-es {background-position: -16px -44px}
-.country-flag.flag-et {background-position: -32px -44px}
-.country-flag.flag-eu {background-position: -48px -44px}
-.country-flag.flag-fi {background-position: -64px -44px}
-.country-flag.flag-fj {background-position: -80px -44px}
-.country-flag.flag-fk {background-position: -96px -44px}
-.country-flag.flag-fm {background-position: -112px -44px}
-.country-flag.flag-fo {background-position: -128px -44px}
-.country-flag.flag-fr {background-position: -144px -44px}
-.country-flag.flag-ga {background-position: -160px -44px}
-.country-flag.flag-gb {background-position: -176px -44px}
-.country-flag.flag-gd {background-position: -192px -44px}
-.country-flag.flag-ge {background-position: -208px -44px}
-.country-flag.flag-gf {background-position: -224px -44px}
-.country-flag.flag-gg {background-position: -240px -44px}
-.country-flag.flag-gh {background-position: 0 -55px}
-.country-flag.flag-gi {background-position: -16px -55px}
-.country-flag.flag-gl {background-position: -32px -55px}
-.country-flag.flag-gm {background-position: -48px -55px}
-.country-flag.flag-gn {background-position: -64px -55px}
-.country-flag.flag-gp {background-position: -80px -55px}
-.country-flag.flag-gq {background-position: -96px -55px}
-.country-flag.flag-gr {background-position: -112px -55px}
-.country-flag.flag-gs {background-position: -128px -55px}
-.country-flag.flag-gt {background-position: -144px -55px}
-.country-flag.flag-gu {background-position: -160px -55px}
-.country-flag.flag-gw {background-position: -176px -55px}
-.country-flag.flag-gy {background-position: -192px -55px}
-.country-flag.flag-hk {background-position: -208px -55px}
-.country-flag.flag-hm {background-position: -224px -55px}
-.country-flag.flag-hn {background-position: -240px -55px}
-.country-flag.flag-hr {background-position: 0 -66px}
-.country-flag.flag-ht {background-position: -16px -66px}
-.country-flag.flag-hu {background-position: -32px -66px}
-.country-flag.flag-ic {background-position: -48px -66px}
-.country-flag.flag-id {background-position: -64px -66px}
-.country-flag.flag-ie {background-position: -80px -66px}
-.country-flag.flag-il {background-position: -96px -66px}
-.country-flag.flag-im {background-position: -112px -66px}
-.country-flag.flag-in {background-position: -128px -66px}
-.country-flag.flag-io {background-position: -144px -66px}
-.country-flag.flag-iq {background-position: -160px -66px}
-.country-flag.flag-ir {background-position: -176px -66px}
-.country-flag.flag-is {background-position: -192px -66px}
-.country-flag.flag-it {background-position: -208px -66px}
-.country-flag.flag-je {background-position: -224px -66px}
-.country-flag.flag-jm {background-position: -240px -66px}
-.country-flag.flag-jo {background-position: 0 -77px}
-.country-flag.flag-jp {background-position: -16px -77px}
-.country-flag.flag-ke {background-position: -32px -77px}
-.country-flag.flag-kg {background-position: -48px -77px}
-.country-flag.flag-kh {background-position: -64px -77px}
-.country-flag.flag-ki {background-position: -80px -77px}
-.country-flag.flag-km {background-position: -96px -77px}
-.country-flag.flag-kn {background-position: -112px -77px}
-.country-flag.flag-kp {background-position: -128px -77px}
-.country-flag.flag-kr {background-position: -144px -77px}
-.country-flag.flag-kurdistan {background-position: -160px -77px}
-.country-flag.flag-kw {background-position: -176px -77px}
-.country-flag.flag-ky {background-position: -192px -77px}
-.country-flag.flag-kz {background-position: -208px -77px}
-.country-flag.flag-la {background-position: -224px -77px}
-.country-flag.flag-lb {background-position: -240px -77px}
-.country-flag.flag-lc {background-position: 0 -88px}
-.country-flag.flag-li {background-position: -16px -88px}
-.country-flag.flag-lk {background-position: -32px -88px}
-.country-flag.flag-lr {background-position: -48px -88px}
-.country-flag.flag-ls {background-position: -64px -88px}
-.country-flag.flag-lt {background-position: -80px -88px}
-.country-flag.flag-lu {background-position: -96px -88px}
-.country-flag.flag-lv {background-position: -112px -88px}
-.country-flag.flag-ly {background-position: -128px -88px}
-.country-flag.flag-ma {background-position: -144px -88px}
-.country-flag.flag-mc {background-position: -160px -88px}
-.country-flag.flag-md {background-position: -176px -88px}
-.country-flag.flag-me {background-position: -192px -88px}
-.country-flag.flag-mg {background-position: -208px -88px}
-.country-flag.flag-mh {background-position: -224px -88px}
-.country-flag.flag-mk {background-position: -240px -88px}
-.country-flag.flag-ml {background-position: 0 -99px}
-.country-flag.flag-mm {background-position: -16px -99px}
-.country-flag.flag-mn {background-position: -32px -99px}
-.country-flag.flag-mo {background-position: -48px -99px}
-.country-flag.flag-mp {background-position: -64px -99px}
-.country-flag.flag-mq {background-position: -80px -99px}
-.country-flag.flag-mr {background-position: -96px -99px}
-.country-flag.flag-ms {background-position: -112px -99px}
-.country-flag.flag-mt {background-position: -128px -99px}
-.country-flag.flag-mu {background-position: -144px -99px}
-.country-flag.flag-mv {background-position: -160px -99px}
-.country-flag.flag-mw {background-position: -176px -99px}
-.country-flag.flag-mx {background-position: -192px -99px}
-.country-flag.flag-my {background-position: -208px -99px}
-.country-flag.flag-mz {background-position: -224px -99px}
-.country-flag.flag-na {background-position: -240px -99px}
-.country-flag.flag-nc {background-position: 0 -110px}
-.country-flag.flag-ne {background-position: -16px -110px}
-.country-flag.flag-nf {background-position: -32px -110px}
-.country-flag.flag-ng {background-position: -48px -110px}
-.country-flag.flag-ni {background-position: -64px -110px}
-.country-flag.flag-nl {background-position: -80px -110px}
-.country-flag.flag-no {background-position: -96px -110px}
-.country-flag.flag-np {background-position: -112px -110px}
-.country-flag.flag-nr {background-position: -128px -110px}
-.country-flag.flag-nu {background-position: -144px -110px}
-.country-flag.flag-nz {background-position: -160px -110px}
-.country-flag.flag-om {background-position: -176px -110px}
-.country-flag.flag-pa {background-position: -192px -110px}
-.country-flag.flag-pe {background-position: -208px -110px}
-.country-flag.flag-pf {background-position: -224px -110px}
-.country-flag.flag-pg {background-position: -240px -110px}
-.country-flag.flag-ph {background-position: 0 -121px}
-.country-flag.flag-pk {background-position: -16px -121px}
-.country-flag.flag-pl {background-position: -32px -121px}
-.country-flag.flag-pm {background-position: -48px -121px}
-.country-flag.flag-pn {background-position: -64px -121px}
-.country-flag.flag-pr {background-position: -80px -121px}
-.country-flag.flag-ps {background-position: -96px -121px}
-.country-flag.flag-pt {background-position: -112px -121px}
-.country-flag.flag-pw {background-position: -128px -121px}
-.country-flag.flag-py {background-position: -144px -121px}
-.country-flag.flag-qa {background-position: -160px -121px}
-.country-flag.flag-re {background-position: -176px -121px}
-.country-flag.flag-ro {background-position: -192px -121px}
-.country-flag.flag-rs {background-position: -208px -121px}
-.country-flag.flag-ru {background-position: -224px -121px}
-.country-flag.flag-rw {background-position: -240px -121px}
-.country-flag.flag-sa {background-position: 0 -132px}
-.country-flag.flag-sb {background-position: -16px -132px}
-.country-flag.flag-sc {background-position: -32px -132px}
-.country-flag.flag-scotland {background-position: -48px -132px}
-.country-flag.flag-sd {background-position: -64px -132px}
-.country-flag.flag-se {background-position: -80px -132px}
-.country-flag.flag-sg {background-position: -96px -132px}
-.country-flag.flag-sh {background-position: -112px -132px}
-.country-flag.flag-si {background-position: -128px -132px}
-.country-flag.flag-sk {background-position: -144px -132px}
-.country-flag.flag-sl {background-position: -160px -132px}
-.country-flag.flag-sm {background-position: -176px -132px}
-.country-flag.flag-sn {background-position: -192px -132px}
-.country-flag.flag-so {background-position: -208px -132px}
-.country-flag.flag-somaliland {background-position: -224px -132px}
-.country-flag.flag-sr {background-position: -240px -132px}
-.country-flag.flag-ss {background-position: 0 -143px}
-.country-flag.flag-st {background-position: -16px -143px}
-.country-flag.flag-sv {background-position: -32px -143px}
-.country-flag.flag-sx {background-position: -48px -143px}
-.country-flag.flag-sy {background-position: -64px -143px}
-.country-flag.flag-sz {background-position: -80px -143px}
-.country-flag.flag-tc {background-position: -96px -143px}
-.country-flag.flag-td {background-position: -112px -143px}
-.country-flag.flag-tf {background-position: -128px -143px}
-.country-flag.flag-tg {background-position: -144px -143px}
-.country-flag.flag-th {background-position: -160px -143px}
-.country-flag.flag-tj {background-position: -176px -143px}
-.country-flag.flag-tk {background-position: -192px -143px}
-.country-flag.flag-tl {background-position: -208px -143px}
-.country-flag.flag-tm {background-position: -224px -143px}
-.country-flag.flag-tn {background-position: -240px -143px}
-.country-flag.flag-to {background-position: 0 -154px}
-.country-flag.flag-tr {background-position: -16px -154px}
-.country-flag.flag-tt {background-position: -32px -154px}
-.country-flag.flag-tv {background-position: -48px -154px}
-.country-flag.flag-tw {background-position: -64px -154px}
-.country-flag.flag-tz {background-position: -80px -154px}
-.country-flag.flag-ua {background-position: -96px -154px}
-.country-flag.flag-ug {background-position: -112px -154px}
-.country-flag.flag-um {background-position: -128px -154px}
-.country-flag.flag-us {background-position: -144px -154px}
-.country-flag.flag-uy {background-position: -160px -154px}
-.country-flag.flag-uz {background-position: -176px -154px}
-.country-flag.flag-va {background-position: -192px -154px}
-.country-flag.flag-vc {background-position: -208px -154px}
-.country-flag.flag-ve {background-position: -224px -154px}
-.country-flag.flag-vg {background-position: -240px -154px}
-.country-flag.flag-vi {background-position: 0 -165px}
-.country-flag.flag-vn {background-position: -16px -165px}
-.country-flag.flag-vu {background-position: -32px -165px}
-.country-flag.flag-wales {background-position: -48px -165px}
-.country-flag.flag-wf {background-position: -64px -165px}
-.country-flag.flag-ws {background-position: -80px -165px}
-.country-flag.flag-ye {background-position: -96px -165px}
-.country-flag.flag-yt {background-position: -112px -165px}
-.country-flag.flag-za {background-position: -128px -165px}
-.country-flag.flag-zanzibar {background-position: -144px -165px}
-.country-flag.flag-zm {background-position: -160px -165px}
-.country-flag.flag-zw {background-position: -176px -165px}
+.country-flag {
+ width: 16px;
+ height: 11px;
+ background:url(../images/country-flags.png) no-repeat
+}
+
+.country-flag.flag-ad {background-position: -16px 0}
+.country-flag.flag-ae {background-position: -32px 0}
+.country-flag.flag-af {background-position: -48px 0}
+.country-flag.flag-ag {background-position: -64px 0}
+.country-flag.flag-ai {background-position: -80px 0}
+.country-flag.flag-al {background-position: -96px 0}
+.country-flag.flag-am {background-position: -112px 0}
+.country-flag.flag-an {background-position: -128px 0}
+.country-flag.flag-ao {background-position: -144px 0}
+.country-flag.flag-ar {background-position: -160px 0}
+.country-flag.flag-as {background-position: -176px 0}
+.country-flag.flag-at {background-position: -192px 0}
+.country-flag.flag-au {background-position: -208px 0}
+.country-flag.flag-aw {background-position: -224px 0}
+.country-flag.flag-az {background-position: -240px 0}
+.country-flag.flag-ba {background-position: 0 -11px}
+.country-flag.flag-bb {background-position: -16px -11px}
+.country-flag.flag-bd {background-position: -32px -11px}
+.country-flag.flag-be {background-position: -48px -11px}
+.country-flag.flag-bf {background-position: -64px -11px}
+.country-flag.flag-bg {background-position: -80px -11px}
+.country-flag.flag-bh {background-position: -96px -11px}
+.country-flag.flag-bi {background-position: -112px -11px}
+.country-flag.flag-bj {background-position: -128px -11px}
+.country-flag.flag-bm {background-position: -144px -11px}
+.country-flag.flag-bn {background-position: -160px -11px}
+.country-flag.flag-bo {background-position: -176px -11px}
+.country-flag.flag-br {background-position: -192px -11px}
+.country-flag.flag-bs {background-position: -208px -11px}
+.country-flag.flag-bt {background-position: -224px -11px}
+.country-flag.flag-bv {background-position: -240px -11px}
+.country-flag.flag-bw {background-position: 0 -22px}
+.country-flag.flag-by {background-position: -16px -22px}
+.country-flag.flag-bz {background-position: -32px -22px}
+.country-flag.flag-ca {background-position: -48px -22px}
+.country-flag.flag-catalonia {background-position: -64px -22px}
+.country-flag.flag-cd {background-position: -80px -22px}
+.country-flag.flag-cf {background-position: -96px -22px}
+.country-flag.flag-cg {background-position: -112px -22px}
+.country-flag.flag-ch {background-position: -128px -22px}
+.country-flag.flag-ci {background-position: -144px -22px}
+.country-flag.flag-ck {background-position: -160px -22px}
+.country-flag.flag-cl {background-position: -176px -22px}
+.country-flag.flag-cm {background-position: -192px -22px}
+.country-flag.flag-cn {background-position: -208px -22px}
+.country-flag.flag-co {background-position: -224px -22px}
+.country-flag.flag-cr {background-position: -240px -22px}
+.country-flag.flag-cu {background-position: 0 -33px}
+.country-flag.flag-cv {background-position: -16px -33px}
+.country-flag.flag-cw {background-position: -32px -33px}
+.country-flag.flag-cy {background-position: -48px -33px}
+.country-flag.flag-cz {background-position: -64px -33px}
+.country-flag.flag-de {background-position: -80px -33px}
+.country-flag.flag-dj {background-position: -96px -33px}
+.country-flag.flag-dk {background-position: -112px -33px}
+.country-flag.flag-dm {background-position: -128px -33px}
+.country-flag.flag-do {background-position: -144px -33px}
+.country-flag.flag-dz {background-position: -160px -33px}
+.country-flag.flag-ec {background-position: -176px -33px}
+.country-flag.flag-ee {background-position: -192px -33px}
+.country-flag.flag-eg {background-position: -208px -33px}
+.country-flag.flag-eh {background-position: -224px -33px}
+.country-flag.flag-england {background-position: -240px -33px}
+.country-flag.flag-er {background-position: 0 -44px}
+.country-flag.flag-es {background-position: -16px -44px}
+.country-flag.flag-et {background-position: -32px -44px}
+.country-flag.flag-eu {background-position: -48px -44px}
+.country-flag.flag-fi {background-position: -64px -44px}
+.country-flag.flag-fj {background-position: -80px -44px}
+.country-flag.flag-fk {background-position: -96px -44px}
+.country-flag.flag-fm {background-position: -112px -44px}
+.country-flag.flag-fo {background-position: -128px -44px}
+.country-flag.flag-fr {background-position: -144px -44px}
+.country-flag.flag-ga {background-position: -160px -44px}
+.country-flag.flag-gb {background-position: -176px -44px}
+.country-flag.flag-gd {background-position: -192px -44px}
+.country-flag.flag-ge {background-position: -208px -44px}
+.country-flag.flag-gf {background-position: -224px -44px}
+.country-flag.flag-gg {background-position: -240px -44px}
+.country-flag.flag-gh {background-position: 0 -55px}
+.country-flag.flag-gi {background-position: -16px -55px}
+.country-flag.flag-gl {background-position: -32px -55px}
+.country-flag.flag-gm {background-position: -48px -55px}
+.country-flag.flag-gn {background-position: -64px -55px}
+.country-flag.flag-gp {background-position: -80px -55px}
+.country-flag.flag-gq {background-position: -96px -55px}
+.country-flag.flag-gr {background-position: -112px -55px}
+.country-flag.flag-gs {background-position: -128px -55px}
+.country-flag.flag-gt {background-position: -144px -55px}
+.country-flag.flag-gu {background-position: -160px -55px}
+.country-flag.flag-gw {background-position: -176px -55px}
+.country-flag.flag-gy {background-position: -192px -55px}
+.country-flag.flag-hk {background-position: -208px -55px}
+.country-flag.flag-hm {background-position: -224px -55px}
+.country-flag.flag-hn {background-position: -240px -55px}
+.country-flag.flag-hr {background-position: 0 -66px}
+.country-flag.flag-ht {background-position: -16px -66px}
+.country-flag.flag-hu {background-position: -32px -66px}
+.country-flag.flag-ic {background-position: -48px -66px}
+.country-flag.flag-id {background-position: -64px -66px}
+.country-flag.flag-ie {background-position: -80px -66px}
+.country-flag.flag-il {background-position: -96px -66px}
+.country-flag.flag-im {background-position: -112px -66px}
+.country-flag.flag-in {background-position: -128px -66px}
+.country-flag.flag-io {background-position: -144px -66px}
+.country-flag.flag-iq {background-position: -160px -66px}
+.country-flag.flag-ir {background-position: -176px -66px}
+.country-flag.flag-is {background-position: -192px -66px}
+.country-flag.flag-it {background-position: -208px -66px}
+.country-flag.flag-je {background-position: -224px -66px}
+.country-flag.flag-jm {background-position: -240px -66px}
+.country-flag.flag-jo {background-position: 0 -77px}
+.country-flag.flag-jp {background-position: -16px -77px}
+.country-flag.flag-ke {background-position: -32px -77px}
+.country-flag.flag-kg {background-position: -48px -77px}
+.country-flag.flag-kh {background-position: -64px -77px}
+.country-flag.flag-ki {background-position: -80px -77px}
+.country-flag.flag-km {background-position: -96px -77px}
+.country-flag.flag-kn {background-position: -112px -77px}
+.country-flag.flag-kp {background-position: -128px -77px}
+.country-flag.flag-kr {background-position: -144px -77px}
+.country-flag.flag-kurdistan {background-position: -160px -77px}
+.country-flag.flag-kw {background-position: -176px -77px}
+.country-flag.flag-ky {background-position: -192px -77px}
+.country-flag.flag-kz {background-position: -208px -77px}
+.country-flag.flag-la {background-position: -224px -77px}
+.country-flag.flag-lb {background-position: -240px -77px}
+.country-flag.flag-lc {background-position: 0 -88px}
+.country-flag.flag-li {background-position: -16px -88px}
+.country-flag.flag-lk {background-position: -32px -88px}
+.country-flag.flag-lr {background-position: -48px -88px}
+.country-flag.flag-ls {background-position: -64px -88px}
+.country-flag.flag-lt {background-position: -80px -88px}
+.country-flag.flag-lu {background-position: -96px -88px}
+.country-flag.flag-lv {background-position: -112px -88px}
+.country-flag.flag-ly {background-position: -128px -88px}
+.country-flag.flag-ma {background-position: -144px -88px}
+.country-flag.flag-mc {background-position: -160px -88px}
+.country-flag.flag-md {background-position: -176px -88px}
+.country-flag.flag-me {background-position: -192px -88px}
+.country-flag.flag-mg {background-position: -208px -88px}
+.country-flag.flag-mh {background-position: -224px -88px}
+.country-flag.flag-mk {background-position: -240px -88px}
+.country-flag.flag-ml {background-position: 0 -99px}
+.country-flag.flag-mm {background-position: -16px -99px}
+.country-flag.flag-mn {background-position: -32px -99px}
+.country-flag.flag-mo {background-position: -48px -99px}
+.country-flag.flag-mp {background-position: -64px -99px}
+.country-flag.flag-mq {background-position: -80px -99px}
+.country-flag.flag-mr {background-position: -96px -99px}
+.country-flag.flag-ms {background-position: -112px -99px}
+.country-flag.flag-mt {background-position: -128px -99px}
+.country-flag.flag-mu {background-position: -144px -99px}
+.country-flag.flag-mv {background-position: -160px -99px}
+.country-flag.flag-mw {background-position: -176px -99px}
+.country-flag.flag-mx {background-position: -192px -99px}
+.country-flag.flag-my {background-position: -208px -99px}
+.country-flag.flag-mz {background-position: -224px -99px}
+.country-flag.flag-na {background-position: -240px -99px}
+.country-flag.flag-nc {background-position: 0 -110px}
+.country-flag.flag-ne {background-position: -16px -110px}
+.country-flag.flag-nf {background-position: -32px -110px}
+.country-flag.flag-ng {background-position: -48px -110px}
+.country-flag.flag-ni {background-position: -64px -110px}
+.country-flag.flag-nl {background-position: -80px -110px}
+.country-flag.flag-no {background-position: -96px -110px}
+.country-flag.flag-np {background-position: -112px -110px}
+.country-flag.flag-nr {background-position: -128px -110px}
+.country-flag.flag-nu {background-position: -144px -110px}
+.country-flag.flag-nz {background-position: -160px -110px}
+.country-flag.flag-om {background-position: -176px -110px}
+.country-flag.flag-pa {background-position: -192px -110px}
+.country-flag.flag-pe {background-position: -208px -110px}
+.country-flag.flag-pf {background-position: -224px -110px}
+.country-flag.flag-pg {background-position: -240px -110px}
+.country-flag.flag-ph {background-position: 0 -121px}
+.country-flag.flag-pk {background-position: -16px -121px}
+.country-flag.flag-pl {background-position: -32px -121px}
+.country-flag.flag-pm {background-position: -48px -121px}
+.country-flag.flag-pn {background-position: -64px -121px}
+.country-flag.flag-pr {background-position: -80px -121px}
+.country-flag.flag-ps {background-position: -96px -121px}
+.country-flag.flag-pt {background-position: -112px -121px}
+.country-flag.flag-pw {background-position: -128px -121px}
+.country-flag.flag-py {background-position: -144px -121px}
+.country-flag.flag-qa {background-position: -160px -121px}
+.country-flag.flag-re {background-position: -176px -121px}
+.country-flag.flag-ro {background-position: -192px -121px}
+.country-flag.flag-rs {background-position: -208px -121px}
+.country-flag.flag-ru {background-position: -224px -121px}
+.country-flag.flag-rw {background-position: -240px -121px}
+.country-flag.flag-sa {background-position: 0 -132px}
+.country-flag.flag-sb {background-position: -16px -132px}
+.country-flag.flag-sc {background-position: -32px -132px}
+.country-flag.flag-scotland {background-position: -48px -132px}
+.country-flag.flag-sd {background-position: -64px -132px}
+.country-flag.flag-se {background-position: -80px -132px}
+.country-flag.flag-sg {background-position: -96px -132px}
+.country-flag.flag-sh {background-position: -112px -132px}
+.country-flag.flag-si {background-position: -128px -132px}
+.country-flag.flag-sk {background-position: -144px -132px}
+.country-flag.flag-sl {background-position: -160px -132px}
+.country-flag.flag-sm {background-position: -176px -132px}
+.country-flag.flag-sn {background-position: -192px -132px}
+.country-flag.flag-so {background-position: -208px -132px}
+.country-flag.flag-somaliland {background-position: -224px -132px}
+.country-flag.flag-sr {background-position: -240px -132px}
+.country-flag.flag-ss {background-position: 0 -143px}
+.country-flag.flag-st {background-position: -16px -143px}
+.country-flag.flag-sv {background-position: -32px -143px}
+.country-flag.flag-sx {background-position: -48px -143px}
+.country-flag.flag-sy {background-position: -64px -143px}
+.country-flag.flag-sz {background-position: -80px -143px}
+.country-flag.flag-tc {background-position: -96px -143px}
+.country-flag.flag-td {background-position: -112px -143px}
+.country-flag.flag-tf {background-position: -128px -143px}
+.country-flag.flag-tg {background-position: -144px -143px}
+.country-flag.flag-th {background-position: -160px -143px}
+.country-flag.flag-tj {background-position: -176px -143px}
+.country-flag.flag-tk {background-position: -192px -143px}
+.country-flag.flag-tl {background-position: -208px -143px}
+.country-flag.flag-tm {background-position: -224px -143px}
+.country-flag.flag-tn {background-position: -240px -143px}
+.country-flag.flag-to {background-position: 0 -154px}
+.country-flag.flag-tr {background-position: -16px -154px}
+.country-flag.flag-tt {background-position: -32px -154px}
+.country-flag.flag-tv {background-position: -48px -154px}
+.country-flag.flag-tw {background-position: -64px -154px}
+.country-flag.flag-tz {background-position: -80px -154px}
+.country-flag.flag-ua {background-position: -96px -154px}
+.country-flag.flag-ug {background-position: -112px -154px}
+.country-flag.flag-um {background-position: -128px -154px}
+.country-flag.flag-us {background-position: -144px -154px}
+.country-flag.flag-uy {background-position: -160px -154px}
+.country-flag.flag-uz {background-position: -176px -154px}
+.country-flag.flag-va {background-position: -192px -154px}
+.country-flag.flag-vc {background-position: -208px -154px}
+.country-flag.flag-ve {background-position: -224px -154px}
+.country-flag.flag-vg {background-position: -240px -154px}
+.country-flag.flag-vi {background-position: 0 -165px}
+.country-flag.flag-vn {background-position: -16px -165px}
+.country-flag.flag-vu {background-position: -32px -165px}
+.country-flag.flag-wales {background-position: -48px -165px}
+.country-flag.flag-wf {background-position: -64px -165px}
+.country-flag.flag-ws {background-position: -80px -165px}
+.country-flag.flag-ye {background-position: -96px -165px}
+.country-flag.flag-yt {background-position: -112px -165px}
+.country-flag.flag-za {background-position: -128px -165px}
+.country-flag.flag-zanzibar {background-position: -144px -165px}
+.country-flag.flag-zm {background-position: -160px -165px}
+.country-flag.flag-zw {background-position: -176px -165px}
diff --git a/gui/slick/images/anidb24.png b/gui/slick/images/anidb24.png
new file mode 100644
index 00000000..617f00b2
Binary files /dev/null and b/gui/slick/images/anidb24.png differ
diff --git a/gui/slick/images/network/33.png b/gui/slick/images/network/33.png
index d6d78e9d..d205e754 100644
Binary files a/gui/slick/images/network/33.png and b/gui/slick/images/network/33.png differ
diff --git a/gui/slick/images/network/abc (australia).png b/gui/slick/images/network/abc (australia).png
index ee320b38..ed50dcaf 100644
Binary files a/gui/slick/images/network/abc (australia).png and b/gui/slick/images/network/abc (australia).png differ
diff --git a/gui/slick/images/network/abc australia.png b/gui/slick/images/network/abc australia.png
index ee320b38..ed50dcaf 100644
Binary files a/gui/slick/images/network/abc australia.png and b/gui/slick/images/network/abc australia.png differ
diff --git a/gui/slick/images/network/abc family.png b/gui/slick/images/network/abc family.png
index 7a7d92f5..addbd3db 100644
Binary files a/gui/slick/images/network/abc family.png and b/gui/slick/images/network/abc family.png differ
diff --git a/gui/slick/images/network/abc1.png b/gui/slick/images/network/abc1.png
index ee320b38..ed50dcaf 100644
Binary files a/gui/slick/images/network/abc1.png and b/gui/slick/images/network/abc1.png differ
diff --git a/gui/slick/images/network/adult swim.png b/gui/slick/images/network/adult swim.png
index 7f0af6f2..1ae90839 100644
Binary files a/gui/slick/images/network/adult swim.png and b/gui/slick/images/network/adult swim.png differ
diff --git a/gui/slick/images/network/adult-swim.png b/gui/slick/images/network/adult-swim.png
index 7f0af6f2..1ae90839 100644
Binary files a/gui/slick/images/network/adult-swim.png and b/gui/slick/images/network/adult-swim.png differ
diff --git a/gui/slick/images/network/adultswim.png b/gui/slick/images/network/adultswim.png
index 7f0af6f2..1ae90839 100644
Binary files a/gui/slick/images/network/adultswim.png and b/gui/slick/images/network/adultswim.png differ
diff --git a/gui/slick/images/network/anime network.png b/gui/slick/images/network/anime network.png
index 0ee27099..d462d377 100644
Binary files a/gui/slick/images/network/anime network.png and b/gui/slick/images/network/anime network.png differ
diff --git a/gui/slick/images/network/ard.png b/gui/slick/images/network/ard.png
index 972adfbe..6b39a714 100644
Binary files a/gui/slick/images/network/ard.png and b/gui/slick/images/network/ard.png differ
diff --git a/gui/slick/images/network/avro.png b/gui/slick/images/network/avro.png
index df699932..8eb0d7c6 100644
Binary files a/gui/slick/images/network/avro.png and b/gui/slick/images/network/avro.png differ
diff --git a/gui/slick/images/network/bbc america.png b/gui/slick/images/network/bbc america.png
index 02263846..908289eb 100644
Binary files a/gui/slick/images/network/bbc america.png and b/gui/slick/images/network/bbc america.png differ
diff --git a/gui/slick/images/network/bbc canada.png b/gui/slick/images/network/bbc canada.png
index 598d0ea6..7bf9a755 100644
Binary files a/gui/slick/images/network/bbc canada.png and b/gui/slick/images/network/bbc canada.png differ
diff --git a/gui/slick/images/network/bbc entertainment.png b/gui/slick/images/network/bbc entertainment.png
index c2aad064..28b28c58 100644
Binary files a/gui/slick/images/network/bbc entertainment.png and b/gui/slick/images/network/bbc entertainment.png differ
diff --git a/gui/slick/images/network/bbc.png b/gui/slick/images/network/bbc.png
index c2aad064..28b28c58 100644
Binary files a/gui/slick/images/network/bbc.png and b/gui/slick/images/network/bbc.png differ
diff --git a/gui/slick/images/network/bloomberg.png b/gui/slick/images/network/bloomberg.png
index 7d6e1dce..b91248a9 100644
Binary files a/gui/slick/images/network/bloomberg.png and b/gui/slick/images/network/bloomberg.png differ
diff --git a/gui/slick/images/network/bravo.png b/gui/slick/images/network/bravo.png
index 248dd5e5..41329084 100644
Binary files a/gui/slick/images/network/bravo.png and b/gui/slick/images/network/bravo.png differ
diff --git a/gui/slick/images/network/canal+.png b/gui/slick/images/network/canal+.png
index 499ed8d3..2fd37bbb 100644
Binary files a/gui/slick/images/network/canal+.png and b/gui/slick/images/network/canal+.png differ
diff --git a/gui/slick/images/network/canvas.png b/gui/slick/images/network/canvas.png
index 95b579c3..73ae3f8c 100644
Binary files a/gui/slick/images/network/canvas.png and b/gui/slick/images/network/canvas.png differ
diff --git a/gui/slick/images/network/dave.png b/gui/slick/images/network/dave.png
index f8289c8a..e70efccc 100644
Binary files a/gui/slick/images/network/dave.png and b/gui/slick/images/network/dave.png differ
diff --git a/gui/slick/images/network/diy network.png b/gui/slick/images/network/diy network.png
index db6dc252..dbb7fcb5 100644
Binary files a/gui/slick/images/network/diy network.png and b/gui/slick/images/network/diy network.png differ
diff --git a/gui/slick/images/network/eo.png b/gui/slick/images/network/eo.png
new file mode 100644
index 00000000..fc9065f5
Binary files /dev/null and b/gui/slick/images/network/eo.png differ
diff --git a/gui/slick/images/network/funimation.png b/gui/slick/images/network/funimation.png
index dd531ec3..c2398c91 100644
Binary files a/gui/slick/images/network/funimation.png and b/gui/slick/images/network/funimation.png differ
diff --git a/gui/slick/images/network/fxx.png b/gui/slick/images/network/fxx.png
index 54863a7e..6e53e60e 100644
Binary files a/gui/slick/images/network/fxx.png and b/gui/slick/images/network/fxx.png differ
diff --git a/gui/slick/images/network/hbo.png b/gui/slick/images/network/hbo.png
index b02d40c4..271ee7cb 100644
Binary files a/gui/slick/images/network/hbo.png and b/gui/slick/images/network/hbo.png differ
diff --git a/gui/slick/images/network/hdnet.png b/gui/slick/images/network/hdnet.png
index 3ad69b81..6a07013e 100644
Binary files a/gui/slick/images/network/hdnet.png and b/gui/slick/images/network/hdnet.png differ
diff --git a/gui/slick/images/network/hgtv canada.png b/gui/slick/images/network/hgtv canada.png
index 6e64a5d1..a291cd46 100644
Binary files a/gui/slick/images/network/hgtv canada.png and b/gui/slick/images/network/hgtv canada.png differ
diff --git a/gui/slick/images/network/hgtv.png b/gui/slick/images/network/hgtv.png
index 86ee82b2..0d56590b 100644
Binary files a/gui/slick/images/network/hgtv.png and b/gui/slick/images/network/hgtv.png differ
diff --git a/gui/slick/images/network/kro.png b/gui/slick/images/network/kro.png
index 3af86472..3e080815 100644
Binary files a/gui/slick/images/network/kro.png and b/gui/slick/images/network/kro.png differ
diff --git a/gui/slick/images/network/max.png b/gui/slick/images/network/max.png
new file mode 100644
index 00000000..32dc4091
Binary files /dev/null and b/gui/slick/images/network/max.png differ
diff --git a/gui/slick/images/network/mtv.png b/gui/slick/images/network/mtv.png
index 47305d3d..0c36f2e1 100644
Binary files a/gui/slick/images/network/mtv.png and b/gui/slick/images/network/mtv.png differ
diff --git a/gui/slick/images/network/mtv2.png b/gui/slick/images/network/mtv2.png
index 00fef6ca..8bfb4570 100644
Binary files a/gui/slick/images/network/mtv2.png and b/gui/slick/images/network/mtv2.png differ
diff --git a/gui/slick/images/network/npo 1.png b/gui/slick/images/network/npo 1.png
new file mode 100644
index 00000000..aa1c1545
Binary files /dev/null and b/gui/slick/images/network/npo 1.png differ
diff --git a/gui/slick/images/network/npo 2.png b/gui/slick/images/network/npo 2.png
new file mode 100644
index 00000000..7dd8a4f9
Binary files /dev/null and b/gui/slick/images/network/npo 2.png differ
diff --git a/gui/slick/images/network/npo 3.png b/gui/slick/images/network/npo 3.png
new file mode 100644
index 00000000..f3001931
Binary files /dev/null and b/gui/slick/images/network/npo 3.png differ
diff --git a/gui/slick/images/network/nps.png b/gui/slick/images/network/nps.png
new file mode 100644
index 00000000..433c0b4c
Binary files /dev/null and b/gui/slick/images/network/nps.png differ
diff --git a/gui/slick/images/network/ntr.png b/gui/slick/images/network/ntr.png
new file mode 100644
index 00000000..ab1dc6cb
Binary files /dev/null and b/gui/slick/images/network/ntr.png differ
diff --git a/gui/slick/images/network/playboy tv.png b/gui/slick/images/network/playboy tv.png
index 8ac3ddb2..dce01d10 100644
Binary files a/gui/slick/images/network/playboy tv.png and b/gui/slick/images/network/playboy tv.png differ
diff --git a/gui/slick/images/network/private spice.png b/gui/slick/images/network/private spice.png
index 36626583..b87783d0 100644
Binary files a/gui/slick/images/network/private spice.png and b/gui/slick/images/network/private spice.png differ
diff --git a/gui/slick/images/network/quest.png b/gui/slick/images/network/quest.png
new file mode 100644
index 00000000..99fbd093
Binary files /dev/null and b/gui/slick/images/network/quest.png differ
diff --git a/gui/slick/images/network/radio west.png b/gui/slick/images/network/radio west.png
index cb737085..0d64d9c3 100644
Binary files a/gui/slick/images/network/radio west.png and b/gui/slick/images/network/radio west.png differ
diff --git a/gui/slick/images/network/sbs 6.png b/gui/slick/images/network/sbs 6.png
index b2da1e64..fb5a2454 100644
Binary files a/gui/slick/images/network/sbs 6.png and b/gui/slick/images/network/sbs 6.png differ
diff --git a/gui/slick/images/network/sbs 9.png b/gui/slick/images/network/sbs 9.png
new file mode 100644
index 00000000..42da25c1
Binary files /dev/null and b/gui/slick/images/network/sbs 9.png differ
diff --git a/gui/slick/images/network/sbs australia.png b/gui/slick/images/network/sbs australia.png
index 971b04b2..2973a9cb 100644
Binary files a/gui/slick/images/network/sbs australia.png and b/gui/slick/images/network/sbs australia.png differ
diff --git a/gui/slick/images/network/sbs.png b/gui/slick/images/network/sbs.png
index 971b04b2..2973a9cb 100644
Binary files a/gui/slick/images/network/sbs.png and b/gui/slick/images/network/sbs.png differ
diff --git a/gui/slick/images/network/sbs6.png b/gui/slick/images/network/sbs6.png
index b2da1e64..fb5a2454 100644
Binary files a/gui/slick/images/network/sbs6.png and b/gui/slick/images/network/sbs6.png differ
diff --git a/gui/slick/images/network/sky atlantic.png b/gui/slick/images/network/sky atlantic.png
index bd971a2b..2a64388d 100644
Binary files a/gui/slick/images/network/sky atlantic.png and b/gui/slick/images/network/sky atlantic.png differ
diff --git a/gui/slick/images/network/starz!.png b/gui/slick/images/network/starz!.png
index 15d2fe87..d1e6cd5c 100644
Binary files a/gui/slick/images/network/starz!.png and b/gui/slick/images/network/starz!.png differ
diff --git a/gui/slick/images/network/starz.png b/gui/slick/images/network/starz.png
index 15d2fe87..d1e6cd5c 100644
Binary files a/gui/slick/images/network/starz.png and b/gui/slick/images/network/starz.png differ
diff --git a/gui/slick/images/network/télé-québec.png b/gui/slick/images/network/tele-quebec.png
similarity index 100%
rename from gui/slick/images/network/télé-québec.png
rename to gui/slick/images/network/tele-quebec.png
diff --git a/gui/slick/images/network/the wb.png b/gui/slick/images/network/the wb.png
index b575a09c..a20f74bb 100644
Binary files a/gui/slick/images/network/the wb.png and b/gui/slick/images/network/the wb.png differ
diff --git a/gui/slick/images/network/thewb.png b/gui/slick/images/network/thewb.png
index b575a09c..a20f74bb 100644
Binary files a/gui/slick/images/network/thewb.png and b/gui/slick/images/network/thewb.png differ
diff --git a/gui/slick/images/network/tmf.png b/gui/slick/images/network/tmf.png
deleted file mode 100644
index 8fc18f02..00000000
Binary files a/gui/slick/images/network/tmf.png and /dev/null differ
diff --git a/gui/slick/images/network/tv west.png b/gui/slick/images/network/tv west.png
index cb737085..0d64d9c3 100644
Binary files a/gui/slick/images/network/tv west.png and b/gui/slick/images/network/tv west.png differ
diff --git a/gui/slick/images/network/tvi.png b/gui/slick/images/network/tvi.png
new file mode 100644
index 00000000..fb7a5fc2
Binary files /dev/null and b/gui/slick/images/network/tvi.png differ
diff --git a/gui/slick/images/network/vara.png b/gui/slick/images/network/vara.png
index 3f092ccc..18a32dba 100644
Binary files a/gui/slick/images/network/vara.png and b/gui/slick/images/network/vara.png differ
diff --git a/gui/slick/images/network/vpro.png b/gui/slick/images/network/vpro.png
index 3d7bb05e..f37b71f2 100644
Binary files a/gui/slick/images/network/vpro.png and b/gui/slick/images/network/vpro.png differ
diff --git a/gui/slick/images/providers/alpharatio.png b/gui/slick/images/providers/alpharatio.png
new file mode 100644
index 00000000..cd579d85
Binary files /dev/null and b/gui/slick/images/providers/alpharatio.png differ
diff --git a/gui/slick/images/providers/anidb.gif b/gui/slick/images/providers/anidb.gif
deleted file mode 100644
index c87ccaa3..00000000
Binary files a/gui/slick/images/providers/anidb.gif and /dev/null differ
diff --git a/gui/slick/images/providers/oldpiratebay.png b/gui/slick/images/providers/oldpiratebay.png
new file mode 100644
index 00000000..6e52fd53
Binary files /dev/null and b/gui/slick/images/providers/oldpiratebay.png differ
diff --git a/gui/slick/images/providers/rarbg.png b/gui/slick/images/providers/rarbg.png
new file mode 100644
index 00000000..17fea244
Binary files /dev/null and b/gui/slick/images/providers/rarbg.png differ
diff --git a/gui/slick/images/providers/shazbat.png b/gui/slick/images/providers/shazbat.png
new file mode 100644
index 00000000..8f86095c
Binary files /dev/null and b/gui/slick/images/providers/shazbat.png differ
diff --git a/gui/slick/interfaces/default/config_anime.tmpl b/gui/slick/interfaces/default/config_anime.tmpl
index 17f56de5..6d377c0b 100644
--- a/gui/slick/interfaces/default/config_anime.tmpl
+++ b/gui/slick/interfaces/default/config_anime.tmpl
@@ -31,7 +31,7 @@
-
+
AniDB is non-profit database of anime information that is freely open to the public
diff --git a/gui/slick/interfaces/default/config_general.tmpl b/gui/slick/interfaces/default/config_general.tmpl
index 2a9b6d57..a42dad03 100644
--- a/gui/slick/interfaces/default/config_general.tmpl
+++ b/gui/slick/interfaces/default/config_general.tmpl
@@ -63,6 +63,16 @@
+
+
+ When to update shows
+
+
+ with information such as next air dates, show ended, etc. Use 15 for 3pm, 4 for 4am etc. Anything over 23 or under 0 will be set to 0 (12am)
+
+
+
+
Update shows on startup
diff --git a/gui/slick/interfaces/default/config_notifications.tmpl b/gui/slick/interfaces/default/config_notifications.tmpl
index 1c64516c..68aedb53 100644
--- a/gui/slick/interfaces/default/config_notifications.tmpl
+++ b/gui/slick/interfaces/default/config_notifications.tmpl
@@ -1375,30 +1375,39 @@
Trakt username
-
-
+
username of your Trakt account.
-
+
Trakt password
-
-
+
password of your Trakt account.
-
+
+
+
+ API Timeout:
+
+
+
+
+ Seconds to wait for Trakt API to respond. (Use 0 to wait forever)
+
+
+
Default indexer:
diff --git a/gui/slick/interfaces/default/config_search.tmpl b/gui/slick/interfaces/default/config_search.tmpl
index 276de372..a930087b 100755
--- a/gui/slick/interfaces/default/config_search.tmpl
+++ b/gui/slick/interfaces/default/config_search.tmpl
@@ -449,7 +449,6 @@
URL to your torrent client (e.g. http://localhost:8000/)
-
Note: rTorrent client URLs use e.g. scgi://localhost:5000/
@@ -467,12 +466,29 @@
+
+
+ Http Authentication
+
+
+ #set $http_authtype = {'none': "None", 'basic': "Basic", 'digest': "Digest"}
+ #for $authvalue,$authname in $http_authtype.items():
+ #set $selected = $html_selected if $sickbeard.TORRENT_AUTH_TYPE == $authvalue else ''
+ $authname
+ #end for
+
+
+
+
+
+
Verify certificate
/>
- disable if you get "Deluge: Authentication Error" in your log
+ disable if you get "Deluge: Authentication Error" in your log
+ Verify SSL certificates for HTTPS requests
diff --git a/gui/slick/interfaces/default/displayShow.tmpl b/gui/slick/interfaces/default/displayShow.tmpl
index c5f5facd..dfc2b237 100644
--- a/gui/slick/interfaces/default/displayShow.tmpl
+++ b/gui/slick/interfaces/default/displayShow.tmpl
@@ -379,7 +379,7 @@
#if ($sickbeard.DISPLAY_FILESIZE == True):
style="min-width: 190px"
#end if
- >>Name
+ >Name
#if ($sickbeard.DISPLAY_FILESIZE == True):
Filesize
#end if
@@ -507,13 +507,15 @@
#if $sickbeard.USE_SUBTITLES and $show.subtitles:
- #if $epResult["subtitles"]:
- #for $sub_lang in subliminal.language.language_list([x.strip() for x in $epResult["subtitles"].split(',')]):
- #if sub_lang.alpha2 != ""
-
- #end if
- #end for
- #end if
+ #if $epResult["subtitles"]:
+ #for $sub_lang in subliminal.language.language_list([x.strip() for x in $epResult["subtitles"].split(',') if x != ""]):
+ #if sub_lang.alpha2 != ""
+
+ #else
+
+ #end if
+ #end for
+ #end if
#end if
diff --git a/gui/slick/interfaces/default/history.tmpl b/gui/slick/interfaces/default/history.tmpl
index 33995fd5..a47cf2ac 100644
--- a/gui/slick/interfaces/default/history.tmpl
+++ b/gui/slick/interfaces/default/history.tmpl
@@ -131,7 +131,11 @@
$hItem["show_name"] - <%="S%02i" % int(hItem["season"])+"E%02i" % int(hItem["episode"]) %>#if "proper" in $hItem["resource"].lower() or "repack" in $hItem["resource"].lower() then ' Proper ' else ""#
#if $curStatus == SUBTITLED:
- ">
+ #if $sickbeard.SUBTITLES_MULTI:
+ " onError="this.onerror=null;this.src='$sbRoot/images/flags/unknown.png';">
+ #else
+
+ #end if
#end if
$statusStrings[$curStatus]
diff --git a/gui/slick/interfaces/default/home.tmpl b/gui/slick/interfaces/default/home.tmpl
index 49b3af7b..a2d29e07 100644
--- a/gui/slick/interfaces/default/home.tmpl
+++ b/gui/slick/interfaces/default/home.tmpl
@@ -104,7 +104,9 @@
filter_columnFilters: false,
filter_reset: '.resetshows'
},
- sortStable: true
+ sortStable: true,
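+ // always append a stable ascending sort on column 1 as a final tie-breaker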
+ sortAppend: [[1,0]]
});
\$("#showListTableAnime:has(tbody tr)").tablesorter({
@@ -126,13 +127,17 @@
filter_columnFilters: false,
filter_reset: '.resetanime'
},
- sortStable: true
+ sortStable: true,
+ sortAppend: [[1,0]]
});
- \$.tablesorter.filter.bindSearch( "#showListTableShows", \$('.search') );
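+ // only bind the filter search box when the table actually has rows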
+ if (\$("#showListTableShows").find("tbody").find("tr").size() > 0)
+ \$.tablesorter.filter.bindSearch( "#showListTableShows", \$('.search') );
#if $sickbeard.ANIME_SPLIT_HOME:
- \$.tablesorter.filter.bindSearch( "#showListTableAnime", \$('.search') );
+ if (\$("#showListTableAnime").find("tbody").find("tr").size() > 0)
+ \$.tablesorter.filter.bindSearch( "#showListTableAnime", \$('.search') );
#end if
#set $fuzzydate = 'airdate'
@@ -403,7 +407,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
#if $layout != 'simple':
#if $curShow.network:
-
+
#else:
#end if
@@ -561,7 +565,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
#if $layout != 'simple':
#if $curShow.network:
-
+
#else:
#end if
diff --git a/gui/slick/interfaces/default/inc_rootDirs.tmpl b/gui/slick/interfaces/default/inc_rootDirs.tmpl
index 25ec8a00..2bce6754 100644
--- a/gui/slick/interfaces/default/inc_rootDirs.tmpl
+++ b/gui/slick/interfaces/default/inc_rootDirs.tmpl
@@ -25,7 +25,3 @@
-
-
diff --git a/gui/slick/interfaces/default/manage_backlogOverview.tmpl b/gui/slick/interfaces/default/manage_backlogOverview.tmpl
index e83b4180..223c7199 100644
--- a/gui/slick/interfaces/default/manage_backlogOverview.tmpl
+++ b/gui/slick/interfaces/default/manage_backlogOverview.tmpl
@@ -46,22 +46,25 @@
#end if
#set $totalWanted = 0
#set $totalQual = 0
+#set $totalSnatched = 0
#for $curShow in $sickbeard.showList:
#set $totalWanted = $totalWanted + $showCounts[$curShow.indexerid][$Overview.WANTED]
#set $totalQual = $totalQual + $showCounts[$curShow.indexerid][$Overview.QUAL]
+#set $totalSnatched = $totalSnatched + $showCounts[$curShow.indexerid][$Overview.SNATCHED]
#end for
Jump to Show
#for $curShow in sorted($sickbeard.showList, key = operator.attrgetter('name')):
- #if $showCounts[$curShow.indexerid][$Overview.QUAL] + $showCounts[$curShow.indexerid][$Overview.WANTED] != 0:
+ #if $showCounts[$curShow.indexerid][$Overview.QUAL] + $showCounts[$curShow.indexerid][$Overview.WANTED] + $showCounts[$curShow.indexerid][$Overview.SNATCHED] != 0:
$curShow.name
#end if
#end for
@@ -72,7 +75,7 @@ Jump to Show
#for $curShow in sorted($sickbeard.showList, key = operator.attrgetter('name')):
-#if $showCounts[$curShow.indexerid][$Overview.QUAL] + $showCounts[$curShow.indexerid][$Overview.WANTED] == 0:
+#if $showCounts[$curShow.indexerid][$Overview.QUAL] + $showCounts[$curShow.indexerid][$Overview.WANTED] + $showCounts[$curShow.indexerid][$Overview.SNATCHED] == 0:
#continue
#end if
@@ -82,6 +85,7 @@ Jump to Show
Wanted: $showCounts[$curShow.indexerid][$Overview.WANTED]
Low Quality: $showCounts[$curShow.indexerid][$Overview.QUAL]
+
Snatched: $showCounts[$curShow.indexerid][$Overview.SNATCHED]
Force Backlog
@@ -97,7 +101,7 @@ Jump to Show
#continue
#end try
- #if $overview not in ($Overview.QUAL, $Overview.WANTED):
+ #if $overview not in ($Overview.QUAL, $Overview.WANTED, $Overview.SNATCHED):
#continue
#end if
diff --git a/gui/slick/js/ajaxEpSubtitles.js b/gui/slick/js/ajaxEpSubtitles.js
index c057ae40..c9e769b8 100644
--- a/gui/slick/js/ajaxEpSubtitles.js
+++ b/gui/slick/js/ajaxEpSubtitles.js
@@ -14,9 +14,9 @@
$.each(subtitles,function(index, language){
if (language != "" && language != "und") {
if (index != subtitles.length - 1) {
- subtitles_td.append($(" ").attr({"src": sbRoot+"/images/flags/"+language+".png", "alt": language, "width": 16, "height": 11}).css({'padding-right' : '6px','padding-bottom' : '4px'}));
+ subtitles_td.append($(" ").attr({"src": sbRoot+"/images/flags/"+language+".png", "alt": language, "width": 16, "height": 11}));
} else {
- subtitles_td.append($(" ").attr({"src": sbRoot+"/images/flags/"+language+".png", "alt": language, "width": 16, "height": 11}).css({'padding-bottom' : '4px'}));
+ subtitles_td.append($(" ").attr({"src": sbRoot+"/images/flags/"+language+".png", "alt": language, "width": 16, "height": 11}));
}
}
});
diff --git a/gui/slick/js/configNotifications.js b/gui/slick/js/configNotifications.js
index 2851bc53..b35c840c 100644
--- a/gui/slick/js/configNotifications.js
+++ b/gui/slick/js/configNotifications.js
@@ -102,7 +102,7 @@ $(document).ready(function(){
$('#boxcar2_accesstoken').removeClass('warning');
$(this).prop('disabled', true);
$('#testBoxcar2-result').html(loading);
- $.get(sbRoot + '/home/testBoxcar2', {'accessToken': boxcar2_accesstoken})
+ $.get(sbRoot + '/home/testBoxcar2', {'accesstoken': boxcar2_accesstoken})
.done(function (data) {
$('#testBoxcar2-result').html(data);
$('#testBoxcar2').prop('disabled', false);
@@ -426,20 +426,25 @@ $(document).ready(function(){
return false;
}
- var current_pushbullet_device = $("#pushbullet_device").val();
$.get(sbRoot + "/home/getPushbulletDevices", {'api': pushbullet_api},
function (data) {
var devices = jQuery.parseJSON(data).devices;
+ var current_pushbullet_device = $("#pushbullet_device").val();
$("#pushbullet_device_list").html('');
for (var i = 0; i < devices.length; i++) {
if(devices[i].active == true) {
if(current_pushbullet_device == devices[i].iden) {
- $("#pushbullet_device_list").append('' + devices[i].nickname + ' ')
+ $("#pushbullet_device_list").append('' + devices[i].nickname + ' ');
} else {
- $("#pushbullet_device_list").append('' + devices[i].nickname + ' ')
+ $("#pushbullet_device_list").append('' + devices[i].nickname + ' ');
}
}
}
+ if (current_pushbullet_device == "") {
+ $("#pushbullet_device_list").prepend('All devices ');
+ } else {
+ $("#pushbullet_device_list").prepend('All devices ');
+ }
if(msg) {
$('#testPushbullet-result').html(msg);
}
diff --git a/gui/slick/js/configProviders.js b/gui/slick/js/configProviders.js
index b46b6be3..5010030e 100644
--- a/gui/slick/js/configProviders.js
+++ b/gui/slick/js/configProviders.js
@@ -204,7 +204,7 @@ $(document).ready(function(){
$(this).getCategories(isDefault, data);
}
else {
- updateNewznabCaps( null, data );
+ $(this).updateNewznabCaps( null, data );
}
}
}
@@ -579,4 +579,4 @@ $(document).ready(function(){
$("#provider_order_list").disableSelection();
-});
\ No newline at end of file
+});
diff --git a/gui/slick/js/configSearch.js b/gui/slick/js/configSearch.js
index 24694843..4e9aef84 100644
--- a/gui/slick/js/configSearch.js
+++ b/gui/slick/js/configSearch.js
@@ -69,6 +69,9 @@ $(document).ready(function(){
$(host_desc_rtorrent).hide();
$(host_desc_torrent).show();
$(torrent_verify_cert_option).hide();
+ $(torrent_verify_deluge).hide();
+ $(torrent_verify_rtorrent).hide();
+ $(torrent_auth_type).hide();
$(torrent_path_option).show();
$(torrent_path_option).find('.fileBrowser').show();
$(torrent_seed_time_option).hide();
@@ -83,6 +86,7 @@ $(document).ready(function(){
client = 'uTorrent';
$(torrent_path_option).hide();
$(torrent_seed_time_option).show();
+ $('#host_desc_torrent').text('URL to your uTorrent client (e.g. http://localhost:8000)');
} else if ('transmission' == selectedProvider){
client = 'Transmission';
$(torrent_seed_time_option).show();
@@ -90,12 +94,16 @@ $(document).ready(function(){
$(torrent_label_option).hide();
$(torrent_label_anime_option).hide();
$(torrent_rpcurl_option).show();
+ $('#host_desc_torrent').text('URL to your Transmission client (e.g. http://localhost:9091)');
//$('#directory_title').text(client + directory);
} else if ('deluge' == selectedProvider){
client = 'Deluge';
$(torrent_verify_cert_option).show();
+ $(torrent_verify_deluge).show();
+ $(torrent_verify_rtorrent).hide();
$(label_warning_deluge).show();
$(label_anime_warning_deluge).show();
+ $('#host_desc_torrent').text('URL to your Deluge client (e.g. http://localhost:8112)');
//$('#directory_title').text(client + directory);
} else if ('download_station' == selectedProvider){
client = 'Synology DS';
@@ -103,13 +111,17 @@ $(document).ready(function(){
$(torrent_label_anime_option).hide();
$('#torrent_paused_option').hide();
$(torrent_path_option).find('.fileBrowser').hide();
+ $('#host_desc_torrent').text('URL to your Synology DS client (e.g. http://localhost:5000)');
//$('#directory_title').text(client + directory);
$(path_synology).show();
} else if ('rtorrent' == selectedProvider){
client = 'rTorrent';
- $(host_desc_torrent).hide();
- $(host_desc_rtorrent).show();
$(torrent_paused_option).hide();
+ $('#host_desc_torrent').text('URL to your rTorrent client (e.g. scgi://localhost:5000 or https://localhost/rutorrent/plugins/httprpc/action.php)');
+ $(torrent_verify_cert_option).show();
+ $(torrent_verify_deluge).hide();
+ $(torrent_verify_rtorrent).show();
+ $(torrent_auth_type).show();
//$('#directory_title').text(client + directory);
}
$('#host_title').text(client + host);
diff --git a/gui/slick/js/newShow.js b/gui/slick/js/newShow.js
index 3daea1f7..d907c372 100644
--- a/gui/slick/js/newShow.js
+++ b/gui/slick/js/newShow.js
@@ -51,7 +51,7 @@ $(document).ready(function () {
},
success: function (data) {
var firstResult = true;
- var resultStr = '\nSearch Results: \n';
+ var resultStr = '\nSearch Results: \n';
var checked = '';
if (data.results.length === 0) {
@@ -68,7 +68,7 @@ $(document).ready(function () {
var whichSeries = obj.join('|');
- resultStr += ' ';
+ resultStr += ' ';
if (data.langid && data.langid != "") {
resultStr += '' + obj[4] + ' ';
} else {
diff --git a/init.debian b/init.debian
index 4d625fd7..2b0aa7bb 100755
--- a/init.debian
+++ b/init.debian
@@ -144,7 +144,7 @@ case "$1" in
stop_sickbeard
sleep 2
start_sickbeard
- return $?
+ exit $?
;;
status)
status_of_proc -p "$PID_FILE" "$DAEMON" "$DESC"
diff --git a/lib/cachecontrol/adapter.py b/lib/cachecontrol/adapter.py
index d2ca7e87..b43b0f06 100644
--- a/lib/cachecontrol/adapter.py
+++ b/lib/cachecontrol/adapter.py
@@ -58,7 +58,11 @@ class CacheControlAdapter(HTTPAdapter):
response = cached_response
else:
# try to cache the response
- self.controller.cache_response(request, response)
+ try:
+ self.controller.cache_response(request, response)
+ except Exception as e:
+ # Failed to cache the results
+ pass
resp = super(CacheControlAdapter, self).build_response(
request, response
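
The try/except added above makes cache writes best-effort: a broken cache backend can no longer take down the request that produced the response. A minimal sketch of the same pattern, with illustrative names (store_safely is not part of CacheControl):

    def store_safely(cache, key, value, log=None):
        # Persist if we can; a failing cache store must never
        # propagate into the response path (mirrors the patch above).
        try:
            cache[key] = value
        except Exception as exc:
            if log is not None:
                log.warning('cache store failed for %r: %s', key, exc)
        # fall through either way: the caller still gets its response
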
diff --git a/lib/lockfile/linklockfile.py b/lib/lockfile/linklockfile.py
index 9c506734..770350f2 100644
--- a/lib/lockfile/linklockfile.py
+++ b/lib/lockfile/linklockfile.py
@@ -5,6 +5,7 @@ import os
from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
AlreadyLocked)
+import errno
class LinkLockFile(LockBase):
"""Lock access to a file using atomic property of link(2).
@@ -28,7 +29,9 @@ class LinkLockFile(LockBase):
# Try and create a hard link to it.
try:
os.link(self.unique_name, self.lock_file)
- except OSError:
+ except OSError as e:
+ if e.errno == errno.ENOSYS:
+ raise LockFailed("%s" % e.strerror)
# Link creation failed. Maybe we've double-locked?
nlinks = os.stat(self.unique_name).st_nlink
if nlinks == 2:
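
os.link(2) is unavailable on some filesystems (certain network and FUSE mounts), in which case the OSError carries errno.ENOSYS; the patch surfaces that as LockFailed instead of falling through to the double-lock check. A standalone illustration of the same errno dispatch (try_hard_link is a hypothetical helper):

    import errno
    import os

    def try_hard_link(src, dst):
        # True on success, False if dst already exists,
        # RuntimeError if the filesystem cannot do hard links at all.
        try:
            os.link(src, dst)
            return True
        except OSError as e:
            if e.errno == errno.ENOSYS:
                raise RuntimeError('hard links unsupported: %s' % e.strerror)
            if e.errno == errno.EEXIST:
                return False
            raise
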
diff --git a/lib/requests/__init__.py b/lib/requests/__init__.py
index bba19002..0ec35660 100644
--- a/lib/requests/__init__.py
+++ b/lib/requests/__init__.py
@@ -13,7 +13,7 @@ Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:
>>> import requests
- >>> r = requests.get('http://python.org')
+ >>> r = requests.get('https://www.python.org')
>>> r.status_code
200
>>> 'Python is a programming language' in r.content
@@ -22,7 +22,7 @@ usage:
... or POST:
>>> payload = dict(key1='value1', key2='value2')
- >>> r = requests.post("http://httpbin.org/post", data=payload)
+ >>> r = requests.post('http://httpbin.org/post', data=payload)
>>> print(r.text)
{
...
@@ -36,17 +36,17 @@ usage:
The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.
-:copyright: (c) 2014 by Kenneth Reitz.
+:copyright: (c) 2015 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'requests'
-__version__ = '2.3.0'
-__build__ = 0x020300
+__version__ = '2.5.1'
+__build__ = 0x020501
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2014 Kenneth Reitz'
+__copyright__ = 'Copyright 2015 Kenneth Reitz'
# Attempt to enable urllib3's SNI support, if possible
try:
diff --git a/lib/requests/adapters.py b/lib/requests/adapters.py
index 0f297ab2..c892853b 100644
--- a/lib/requests/adapters.py
+++ b/lib/requests/adapters.py
@@ -9,23 +9,27 @@ and maintain connections.
"""
import socket
-import copy
from .models import Response
+from .packages.urllib3 import Retry
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
-from .compat import urlparse, basestring, urldefrag, unquote
+from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
- except_on_missing_scheme, get_auth_from_url)
+ prepend_scheme_if_needed, get_auth_from_url, urldefragauth)
from .structures import CaseInsensitiveDict
-from .packages.urllib3.exceptions import MaxRetryError
-from .packages.urllib3.exceptions import TimeoutError
-from .packages.urllib3.exceptions import SSLError as _SSLError
+from .packages.urllib3.exceptions import ConnectTimeoutError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
+from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
+from .packages.urllib3.exceptions import ProtocolError
+from .packages.urllib3.exceptions import ReadTimeoutError
+from .packages.urllib3.exceptions import SSLError as _SSLError
+from .packages.urllib3.exceptions import ResponseError
from .cookies import extract_cookies_to_jar
-from .exceptions import ConnectionError, Timeout, SSLError, ProxyError
+from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
+ ProxyError, RetryError)
from .auth import _basic_auth_str
DEFAULT_POOLBLOCK = False
@@ -57,13 +61,17 @@ class HTTPAdapter(BaseAdapter):
:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
:param int max_retries: The maximum number of retries each connection
- should attempt. Note, this applies only to failed connections and
- timeouts, never to requests where the server returns a response.
+ should attempt. Note, this applies only to failed DNS lookups, socket
+ connections and connection timeouts, never to requests where data has
+ made it to the server. By default, Requests does not retry failed
+ connections. If you need granular control over the conditions under
+ which we retry a request, import urllib3's ``Retry`` class and pass
+ that instead.
:param pool_block: Whether the connection pool should block for connections.
Usage::
- >>> import lib.requests
+ >>> import requests
>>> s = requests.Session()
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
>>> s.mount('http://', a)
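
As the reworded docstring notes, an integer max_retries only retries connection-level failures. For retries keyed on response codes or read errors, pass a urllib3 Retry instance; a sketch with illustrative policy values:

    import requests
    from requests.adapters import HTTPAdapter
    from requests.packages.urllib3 import Retry

    retries = Retry(total=3,                 # overall attempt budget
                    backoff_factor=0.5,      # sleep 0.5s, 1s, 2s between tries
                    status_forcelist=[502, 503, 504])

    s = requests.Session()
    s.mount('http://', HTTPAdapter(max_retries=retries))
    s.mount('https://', HTTPAdapter(max_retries=retries))
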
@@ -74,7 +82,10 @@ class HTTPAdapter(BaseAdapter):
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
pool_block=DEFAULT_POOLBLOCK):
- self.max_retries = max_retries
+ if max_retries == DEFAULT_RETRIES:
+ self.max_retries = Retry(0, read=False)
+ else:
+ self.max_retries = Retry.from_int(max_retries)
self.config = {}
self.proxy_manager = {}
@@ -102,14 +113,17 @@ class HTTPAdapter(BaseAdapter):
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
block=self._pool_block)
- def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
- """Initializes a urllib3 PoolManager. This method should not be called
- from user code, and is only exposed for use when subclassing the
+ def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
+ """Initializes a urllib3 PoolManager.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
+ :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
"""
# save these values for pickling
self._pool_connections = connections
@@ -117,7 +131,30 @@ class HTTPAdapter(BaseAdapter):
self._pool_block = block
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
- block=block)
+ block=block, strict=True, **pool_kwargs)
+
+ def proxy_manager_for(self, proxy, **proxy_kwargs):
+ """Return urllib3 ProxyManager for the given proxy.
+
+ This method should not be called from user code, and is only
+ exposed for use when subclassing the
+ :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
+
+ :param proxy: The proxy to return a urllib3 ProxyManager for.
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
+ :returns: ProxyManager
+ """
+ if not proxy in self.proxy_manager:
+ proxy_headers = self.proxy_headers(proxy)
+ self.proxy_manager[proxy] = proxy_from_url(
+ proxy,
+ proxy_headers=proxy_headers,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block,
+ **proxy_kwargs)
+
+ return self.proxy_manager[proxy]
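
proxy_manager_for centralizes what get_connection used to inline: one lazily created, cached ProxyManager per distinct proxy URL. Nothing changes on the caller's side; the usual proxies mapping still drives it (proxy addresses below are placeholders):

    import requests

    proxies = {'http': 'http://10.10.1.10:3128',
               'https': 'http://10.10.1.10:1080'}

    # both requests reuse the same cached ProxyManager per proxy URL
    requests.get('http://httpbin.org/ip', proxies=proxies)
    requests.get('http://httpbin.org/headers', proxies=proxies)
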
def cert_verify(self, conn, url, verify, cert):
"""Verify a SSL certificate. This method should not be called from user
@@ -204,18 +241,9 @@ class HTTPAdapter(BaseAdapter):
proxy = proxies.get(urlparse(url.lower()).scheme)
if proxy:
- except_on_missing_scheme(proxy)
- proxy_headers = self.proxy_headers(proxy)
-
- if not proxy in self.proxy_manager:
- self.proxy_manager[proxy] = proxy_from_url(
- proxy,
- proxy_headers=proxy_headers,
- num_pools=self._pool_connections,
- maxsize=self._pool_maxsize,
- block=self._pool_block)
-
- conn = self.proxy_manager[proxy].connection_from_url(url)
+ proxy = prepend_scheme_if_needed(proxy, 'http')
+ proxy_manager = self.proxy_manager_for(proxy)
+ conn = proxy_manager.connection_from_url(url)
else:
# Only scheme should be lower case
parsed = urlparse(url)
@@ -250,7 +278,7 @@ class HTTPAdapter(BaseAdapter):
proxy = proxies.get(scheme)
if proxy and scheme != 'https':
- url, _ = urldefrag(request.url)
+ url = urldefragauth(request.url)
else:
url = request.path_url
@@ -297,7 +325,10 @@ class HTTPAdapter(BaseAdapter):
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
- :param timeout: (optional) The timeout on the request.
+ :param timeout: (optional) How long to wait for the server to send
+ data before giving up, as a float, or a (`connect timeout, read
+ timeout <user/advanced.html#timeouts>`_) tuple.
+ :type timeout: float or tuple
:param verify: (optional) Whether to verify SSL certificates.
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
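
Per the new :type timeout: docs, send() now accepts either a single float (applied to both phases) or a (connect, read) pair; anything else lands in the ValueError branch in the next hunk. For example:

    import requests

    # 3.05s to establish the connection, 27s for the server to send data
    requests.get('http://httpbin.org/get', timeout=(3.05, 27))

    # a bare float still sets both timeouts at once
    requests.get('http://httpbin.org/get', timeout=5.0)
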
@@ -311,7 +342,18 @@ class HTTPAdapter(BaseAdapter):
chunked = not (request.body is None or 'Content-Length' in request.headers)
- timeout = TimeoutSauce(connect=timeout, read=timeout)
+ if isinstance(timeout, tuple):
+ try:
+ connect, read = timeout
+ timeout = TimeoutSauce(connect=connect, read=read)
+ except ValueError as e:
+ # this may raise a string formatting error.
+ err = ("Invalid timeout {0}. Pass a (connect, read) "
+ "timeout tuple, or a single float to set "
+ "both timeouts to the same value".format(timeout))
+ raise ValueError(err)
+ else:
+ timeout = TimeoutSauce(connect=timeout, read=timeout)
try:
if not chunked:
@@ -369,10 +411,16 @@ class HTTPAdapter(BaseAdapter):
# All is well, return the connection to the pool.
conn._put_conn(low_conn)
- except socket.error as sockerr:
- raise ConnectionError(sockerr, request=request)
+ except (ProtocolError, socket.error) as err:
+ raise ConnectionError(err, request=request)
except MaxRetryError as e:
+ if isinstance(e.reason, ConnectTimeoutError):
+ raise ConnectTimeout(e, request=request)
+
+ if isinstance(e.reason, ResponseError):
+ raise RetryError(e, request=request)
+
raise ConnectionError(e, request=request)
except _ProxyError as e:
@@ -381,14 +429,9 @@ class HTTPAdapter(BaseAdapter):
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
raise SSLError(e, request=request)
- elif isinstance(e, TimeoutError):
- raise Timeout(e, request=request)
+ elif isinstance(e, ReadTimeoutError):
+ raise ReadTimeout(e, request=request)
else:
raise
- r = self.build_response(request, resp)
-
- if not stream:
- r.content
-
- return r
\ No newline at end of file
+ return self.build_response(request, resp)
diff --git a/lib/requests/api.py b/lib/requests/api.py
index 01d853d5..1469b05c 100644
--- a/lib/requests/api.py
+++ b/lib/requests/api.py
@@ -22,12 +22,17 @@ def request(method, url, **kwargs):
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json data to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
- :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
+ :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
- :param timeout: (optional) Float describing the timeout of the request in seconds.
+ :param timeout: (optional) How long to wait for the server to send data
+ before giving up, as a float, or a (`connect timeout, read timeout
+ <user/advanced.html#timeouts>`_) tuple.
+ :type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
+ :type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
@@ -41,7 +46,12 @@ def request(method, url, **kwargs):
"""
session = sessions.Session()
- return session.request(method=method, url=url, **kwargs)
+ response = session.request(method=method, url=url, **kwargs)
+ # By explicitly closing the session, we avoid leaving sockets open which
+ # can trigger a ResourceWarning in some cases, and look like a memory leak
+ # in others.
+ session.close()
+ return response
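
Each functional-API call now builds a throwaway Session and closes it before returning. When making several requests, managing your own session keeps the connection pool alive instead of paying that setup cost per call:

    import requests

    s = requests.Session()
    try:
        for path in ('get', 'headers', 'ip'):
            s.get('http://httpbin.org/' + path)   # pooled connections reused
    finally:
        s.close()   # mirrors what request() now does internally
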
def get(url, **kwargs):
@@ -77,15 +87,16 @@ def head(url, **kwargs):
return request('head', url, **kwargs)
-def post(url, data=None, **kwargs):
+def post(url, data=None, json=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
- return request('post', url, data=data, **kwargs)
+ return request('post', url, data=data, json=json, **kwargs)
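
With the json parameter threaded through to prepare_body(), post() serializes the object and sets the Content-Type header itself, replacing the manual json.dumps dance:

    import json
    import requests

    payload = {'key1': 'value1', 'key2': 'value2'}

    # before: serialize and set the header by hand
    requests.post('http://httpbin.org/post', data=json.dumps(payload),
                  headers={'Content-Type': 'application/json'})

    # after this change: prepare_body() does both
    requests.post('http://httpbin.org/post', json=payload)
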
def put(url, data=None, **kwargs):
diff --git a/lib/requests/auth.py b/lib/requests/auth.py
index 9f831b7a..b950181d 100644
--- a/lib/requests/auth.py
+++ b/lib/requests/auth.py
@@ -16,7 +16,8 @@ from base64 import b64encode
from .compat import urlparse, str
from .cookies import extract_cookies_to_jar
-from .utils import parse_dict_header
+from .utils import parse_dict_header, to_native_string
+from .status_codes import codes
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
@@ -25,7 +26,11 @@ CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password):
"""Returns a Basic Auth string."""
- return 'Basic ' + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip().decode('latin1')
+ authstr = 'Basic ' + to_native_string(
+ b64encode(('%s:%s' % (username, password)).encode('latin1')).strip()
+ )
+
+ return authstr
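
to_native_string keeps the header a native str on both Python 2 and 3 rather than always decoding to latin1 text; the value itself is unchanged. A quick standalone check (basic_auth_value is a hypothetical rewrite without the py2/py3 shim):

    from base64 import b64encode

    def basic_auth_value(username, password):
        return 'Basic ' + b64encode(
            ('%s:%s' % (username, password)).encode('latin1')
        ).strip().decode('latin1')

    assert basic_auth_value('user', 'pass') == 'Basic dXNlcjpwYXNz'
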
class AuthBase(object):
@@ -62,6 +67,7 @@ class HTTPDigestAuth(AuthBase):
self.nonce_count = 0
self.chal = {}
self.pos = None
+ self.num_401_calls = 1
def build_digest_header(self, method, url):
@@ -146,6 +152,11 @@ class HTTPDigestAuth(AuthBase):
return 'Digest %s' % (base)
+ def handle_redirect(self, r, **kwargs):
+ """Reset num_401_calls counter on redirects."""
+ if r.is_redirect:
+ self.num_401_calls = 1
+
def handle_401(self, r, **kwargs):
"""Takes the given response and tries digest-auth, if needed."""
@@ -158,7 +169,7 @@ class HTTPDigestAuth(AuthBase):
if 'digest' in s_auth.lower() and num_401_calls < 2:
- setattr(self, 'num_401_calls', num_401_calls + 1)
+ self.num_401_calls += 1
pat = re.compile(r'digest ', flags=re.IGNORECASE)
self.chal = parse_dict_header(pat.sub('', s_auth, count=1))
@@ -178,7 +189,7 @@ class HTTPDigestAuth(AuthBase):
return _r
- setattr(self, 'num_401_calls', 1)
+ self.num_401_calls = 1
return r
def __call__(self, r):
@@ -188,6 +199,11 @@ class HTTPDigestAuth(AuthBase):
try:
self.pos = r.body.tell()
except AttributeError:
- pass
+ # In the case of HTTPDigestAuth being reused and the body of
+ # the previous request was a file-like object, pos has the
+ # file position of the previous body. Ensure it's set to
+ # None.
+ self.pos = None
r.register_hook('response', self.handle_401)
+ r.register_hook('response', self.handle_redirect)
return r
diff --git a/lib/requests/certs.py b/lib/requests/certs.py
index bc008261..07e64750 100644
--- a/lib/requests/certs.py
+++ b/lib/requests/certs.py
@@ -11,14 +11,15 @@ If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
"""
-
import os.path
-
-def where():
- """Return the preferred certificate bundle."""
- # vendored bundle inside Requests
- return os.path.join(os.path.dirname(__file__), 'cacert.pem')
+try:
+ from certifi import where
+except ImportError:
+ def where():
+ """Return the preferred certificate bundle."""
+ # vendored bundle inside Requests
+ return os.path.join(os.path.dirname(__file__), 'cacert.pem')
if __name__ == '__main__':
print(where())
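
where() now defers to the certifi package when it is installed and only falls back to the vendored cacert.pem. Which bundle is actually in effect is easy to inspect:

    from requests import certs

    # certifi's bundle path if certifi is importable,
    # otherwise .../requests/cacert.pem from the vendored copy
    print(certs.where())
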
diff --git a/lib/requests/compat.py b/lib/requests/compat.py
index bdf10d6a..c07726ee 100644
--- a/lib/requests/compat.py
+++ b/lib/requests/compat.py
@@ -75,7 +75,9 @@ is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
try:
import simplejson as json
-except ImportError:
+except (ImportError, SyntaxError):
+ # simplejson does not support Python 3.2, it throws a SyntaxError
+ # because of u'...' Unicode literals.
import json
# ---------
@@ -90,7 +92,6 @@ if is_py2:
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
- from httplib import IncompleteRead
builtin_str = str
bytes = str
@@ -106,7 +107,6 @@ elif is_py3:
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
- from http.client import IncompleteRead
builtin_str = str
str = str
diff --git a/lib/requests/cookies.py b/lib/requests/cookies.py
index 831c49c6..6969fe5c 100644
--- a/lib/requests/cookies.py
+++ b/lib/requests/cookies.py
@@ -157,26 +157,28 @@ class CookieConflictError(RuntimeError):
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
- """Compatibility class; is a cookielib.CookieJar, but exposes a dict interface.
+ """Compatibility class; is a cookielib.CookieJar, but exposes a dict
+ interface.
This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations.
- Don't use the dict interface internally; it's just for compatibility with
- with external client code. All `requests` code should work out of the box
- with externally provided instances of CookieJar, e.g., LWPCookieJar and
- FileCookieJar.
-
- Caution: dictionary operations that are normally O(1) may be O(n).
+ Requests does not use the dict interface internally; it's just for
+ compatibility with external client code. All requests code should work
+ out of the box with externally provided instances of ``CookieJar``, e.g.
+ ``LWPCookieJar`` and ``FileCookieJar``.
Unlike a regular CookieJar, this class is pickleable.
- """
+ .. warning:: dictionary operations that are normally O(1) may be O(n).
+ """
def get(self, name, default=None, domain=None, path=None):
"""Dict-like get() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
- multiple domains. Caution: operation is O(n), not O(1)."""
+ multiple domains.
+
+ .. warning:: operation is O(n), not O(1)."""
try:
return self._find_no_duplicates(name, domain, path)
except KeyError:
@@ -199,37 +201,38 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return c
def iterkeys(self):
- """Dict-like iterkeys() that returns an iterator of names of cookies from the jar.
- See itervalues() and iteritems()."""
+ """Dict-like iterkeys() that returns an iterator of names of cookies
+ from the jar. See itervalues() and iteritems()."""
for cookie in iter(self):
yield cookie.name
def keys(self):
- """Dict-like keys() that returns a list of names of cookies from the jar.
- See values() and items()."""
+ """Dict-like keys() that returns a list of names of cookies from the
+ jar. See values() and items()."""
return list(self.iterkeys())
def itervalues(self):
- """Dict-like itervalues() that returns an iterator of values of cookies from the jar.
- See iterkeys() and iteritems()."""
+ """Dict-like itervalues() that returns an iterator of values of cookies
+ from the jar. See iterkeys() and iteritems()."""
for cookie in iter(self):
yield cookie.value
def values(self):
- """Dict-like values() that returns a list of values of cookies from the jar.
- See keys() and items()."""
+ """Dict-like values() that returns a list of values of cookies from the
+ jar. See keys() and items()."""
return list(self.itervalues())
def iteritems(self):
- """Dict-like iteritems() that returns an iterator of name-value tuples from the jar.
- See iterkeys() and itervalues()."""
+ """Dict-like iteritems() that returns an iterator of name-value tuples
+ from the jar. See iterkeys() and itervalues()."""
for cookie in iter(self):
yield cookie.name, cookie.value
def items(self):
- """Dict-like items() that returns a list of name-value tuples from the jar.
- See keys() and values(). Allows client-code to call "dict(RequestsCookieJar)
- and get a vanilla python dict of key value pairs."""
+ """Dict-like items() that returns a list of name-value tuples from the
+ jar. See keys() and values(). Allows client-code to call
+ ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
+ pairs."""
return list(self.iteritems())
def list_domains(self):
@@ -259,8 +262,9 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return False # there is only one domain in jar
def get_dict(self, domain=None, path=None):
- """Takes as an argument an optional domain and path and returns a plain old
- Python dict of name-value pairs of cookies that meet the requirements."""
+ """Takes as an argument an optional domain and path and returns a plain
+ old Python dict of name-value pairs of cookies that meet the
+ requirements."""
dictionary = {}
for cookie in iter(self):
if (domain is None or cookie.domain == domain) and (path is None
@@ -269,21 +273,24 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return dictionary
def __getitem__(self, name):
- """Dict-like __getitem__() for compatibility with client code. Throws exception
- if there are more than one cookie with name. In that case, use the more
- explicit get() method instead. Caution: operation is O(n), not O(1)."""
+ """Dict-like __getitem__() for compatibility with client code. Throws
+ exception if there are more than one cookie with name. In that case,
+ use the more explicit get() method instead.
+
+ .. warning:: operation is O(n), not O(1)."""
return self._find_no_duplicates(name)
def __setitem__(self, name, value):
- """Dict-like __setitem__ for compatibility with client code. Throws exception
- if there is already a cookie of that name in the jar. In that case, use the more
- explicit set() method instead."""
+ """Dict-like __setitem__ for compatibility with client code. Throws
+ exception if there is already a cookie of that name in the jar. In that
+ case, use the more explicit set() method instead."""
self.set(name, value)
def __delitem__(self, name):
- """Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
+ """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
+ ``remove_cookie_by_name()``."""
remove_cookie_by_name(self, name)
def set_cookie(self, cookie, *args, **kwargs):
@@ -300,10 +307,11 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None):
- """Requests uses this method internally to get cookie values. Takes as args name
- and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
- _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
- if there are conflicting cookies."""
+ """Requests uses this method internally to get cookie values. Takes as
+ args name and optional domain and path. Returns a cookie.value. If
+ there are conflicting cookies, _find arbitrarily chooses one. See
+ _find_no_duplicates if you want an exception thrown if there are
+ conflicting cookies."""
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
@@ -313,10 +321,11 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None):
- """__get_item__ and get call _find_no_duplicates -- never used in Requests internally.
- Takes as args name and optional domain and path. Returns a cookie.value.
- Throws KeyError if cookie is not found and CookieConflictError if there are
- multiple cookies that match name and optionally domain and path."""
+ """Both ``__get_item__`` and ``get`` call this function: it's never
+ used elsewhere in Requests. Takes as args name and optional domain and
+ path. Returns a cookie.value. Throws KeyError if cookie is not found
+ and CookieConflictError if there are multiple cookies that match name
+ and optionally domain and path."""
toReturn = None
for cookie in iter(self):
if cookie.name == name:
@@ -440,7 +449,7 @@ def merge_cookies(cookiejar, cookies):
"""
if not isinstance(cookiejar, cookielib.CookieJar):
raise ValueError('You can only merge into CookieJar')
-
+
if isinstance(cookies, dict):
cookiejar = cookiejar_from_dict(
cookies, cookiejar=cookiejar, overwrite=False)
diff --git a/lib/requests/exceptions.py b/lib/requests/exceptions.py
index a4ee9d63..89135a80 100644
--- a/lib/requests/exceptions.py
+++ b/lib/requests/exceptions.py
@@ -44,7 +44,23 @@ class SSLError(ConnectionError):
class Timeout(RequestException):
- """The request timed out."""
+ """The request timed out.
+
+ Catching this error will catch both
+ :exc:`~requests.exceptions.ConnectTimeout` and
+ :exc:`~requests.exceptions.ReadTimeout` errors.
+ """
+
+
+class ConnectTimeout(ConnectionError, Timeout):
+ """The request timed out while trying to connect to the remote server.
+
+ Requests that produced this error are safe to retry.
+ """
+
+
+class ReadTimeout(Timeout):
+ """The server did not send any data in the allotted amount of time."""
class URLRequired(RequestException):
@@ -73,3 +89,11 @@ class ChunkedEncodingError(RequestException):
class ContentDecodingError(RequestException, BaseHTTPError):
"""Failed to decode response content"""
+
+
+class StreamConsumedError(RequestException, TypeError):
+ """The content for this response was already consumed"""
+
+
+class RetryError(RequestException):
+ """Custom retries logic failed"""
diff --git a/lib/requests/models.py b/lib/requests/models.py
index e2fa09f8..b728c84e 100644
--- a/lib/requests/models.py
+++ b/lib/requests/models.py
@@ -19,31 +19,35 @@ from .cookies import cookiejar_from_dict, get_cookie_header
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
-from .packages.urllib3.exceptions import DecodeError
+from .packages.urllib3.exceptions import (
+ DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from .exceptions import (
- HTTPError, RequestException, MissingSchema, InvalidURL,
- ChunkedEncodingError, ContentDecodingError)
+ HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
+ ContentDecodingError, ConnectionError, StreamConsumedError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
- is_py2, chardet, json, builtin_str, basestring, IncompleteRead)
+ is_py2, chardet, json, builtin_str, basestring)
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
- codes.moved, # 301
- codes.found, # 302
- codes.other, # 303
- codes.temporary_moved, # 307
+ codes.moved, # 301
+ codes.found, # 302
+ codes.other, # 303
+ codes.temporary_redirect, # 307
+ codes.permanent_redirect, # 308
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
+json_dumps = json.dumps
+
class RequestEncodingMixin(object):
@property
@@ -187,7 +191,8 @@ class Request(RequestHooksMixin):
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
- :param data: the body to attach the request. If a dictionary is provided, form-encoding will take place.
+ :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
+ :param json: json for the body to attach to the request (if data is not specified).
:param params: dictionary of URL parameters to append to the URL.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
@@ -210,7 +215,8 @@ class Request(RequestHooksMixin):
params=None,
auth=None,
cookies=None,
- hooks=None):
+ hooks=None,
+ json=None):
# Default empty dicts for dict params.
data = [] if data is None else data
@@ -228,6 +234,7 @@ class Request(RequestHooksMixin):
self.headers = headers
self.files = files
self.data = data
+ self.json = json
self.params = params
self.auth = auth
self.cookies = cookies
@@ -244,6 +251,7 @@ class Request(RequestHooksMixin):
headers=self.headers,
files=self.files,
data=self.data,
+ json=self.json,
params=self.params,
auth=self.auth,
cookies=self.cookies,
@@ -287,14 +295,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
- data=None, params=None, auth=None, cookies=None, hooks=None):
+ data=None, params=None, auth=None, cookies=None, hooks=None,
+ json=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
self.prepare_url(url, params)
self.prepare_headers(headers)
self.prepare_cookies(cookies)
- self.prepare_body(data, files)
+ self.prepare_body(data, files, json)
self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
@@ -309,8 +318,8 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
p = PreparedRequest()
p.method = self.method
p.url = self.url
- p.headers = self.headers.copy()
- p._cookies = self._cookies.copy()
+ p.headers = self.headers.copy() if self.headers is not None else None
+ p._cookies = self._cookies.copy() if self._cookies is not None else None
p.body = self.body
p.hooks = self.hooks
return p
@@ -324,21 +333,27 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
- try:
- url = unicode(url)
- except NameError:
- # We're on Python 3.
- url = str(url)
- except UnicodeDecodeError:
- pass
+ #: We're unable to blindly call unicode/str functions
+ #: as this will include the bytestring indicator (b'')
+ #: on python 3.x.
+ #: https://github.com/kennethreitz/requests/pull/2238
+ if isinstance(url, bytes):
+ url = url.decode('utf8')
+ else:
+ url = unicode(url) if is_py2 else str(url)
- # Don't do any URL preparation for oddball schemes
+ # Don't do any URL preparation for non-HTTP schemes like `mailto`,
+ # `data` etc to work around exceptions from `url_parse`, which
+ # handles RFC 3986 only.
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
- scheme, auth, host, port, path, query, fragment = parse_url(url)
+ try:
+ scheme, auth, host, port, path, query, fragment = parse_url(url)
+ except LocationParseError as e:
+ raise InvalidURL(*e.args)
if not scheme:
raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
@@ -395,7 +410,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
else:
self.headers = CaseInsensitiveDict()
- def prepare_body(self, data, files):
+ def prepare_body(self, data, files, json=None):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
@@ -406,11 +421,13 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
content_type = None
length = None
+ if json is not None:
+ content_type = 'application/json'
+ body = json_dumps(json)
+
is_stream = all([
hasattr(data, '__iter__'),
- not isinstance(data, basestring),
- not isinstance(data, list),
- not isinstance(data, dict)
+ not isinstance(data, (basestring, list, tuple, dict))
])
try:
@@ -433,9 +450,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if files:
(body, content_type) = self._encode_files(files, data)
else:
- if data:
+ if data and json is None:
body = self._encode_params(data)
- if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'):
+ if isinstance(data, basestring) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
@@ -443,7 +460,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
- if (content_type) and (not 'content-type' in self.headers):
+ if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
@@ -457,7 +474,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
l = super_len(body)
if l:
self.headers['Content-Length'] = builtin_str(l)
- elif self.method not in ('GET', 'HEAD'):
+ elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
@@ -558,6 +575,10 @@ class Response(object):
#: and the arrival of the response (as a timedelta)
self.elapsed = datetime.timedelta(0)
+ #: The :class:`PreparedRequest <PreparedRequest>` object to which this
+ #: is a response.
+ self.request = None
+
def __getstate__(self):
# Consume everything; accessing the content attribute makes
# sure the content has been fully read.
@@ -596,7 +617,7 @@ class Response(object):
def ok(self):
try:
self.raise_for_status()
- except RequestException:
+ except HTTPError:
return False
return True
@@ -607,6 +628,11 @@ class Response(object):
"""
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
+ @property
+ def is_permanent_redirect(self):
+ """True if this Response one of the permanant versions of redirect"""
+ return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
+
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the chardet library"""
@@ -618,21 +644,22 @@ class Response(object):
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
- """
- if self._content_consumed:
- # simulate reading small chunks of the content
- return iter_slices(self._content, chunk_size)
+ If decode_unicode is True, content will be decoded using the best
+ available encoding based on the response.
+ """
def generate():
try:
# Special case for urllib3.
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
- except IncompleteRead as e:
+ except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
+ except ReadTimeoutError as e:
+ raise ConnectionError(e)
except AttributeError:
# Standard file-like object.
while True:
@@ -643,14 +670,21 @@ class Response(object):
self._content_consumed = True
- gen = generate()
+ if self._content_consumed and isinstance(self._content, bool):
+ raise StreamConsumedError()
+ # simulate reading small chunks of the content
+ reused_chunks = iter_slices(self._content, chunk_size)
+
+ stream_chunks = generate()
+
+ chunks = reused_chunks if self._content_consumed else stream_chunks
if decode_unicode:
- gen = stream_decode_response_unicode(gen, self)
+ chunks = stream_decode_response_unicode(chunks, self)
- return gen
+ return chunks
- def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None):
+ def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
@@ -662,7 +696,11 @@ class Response(object):
if pending is not None:
chunk = pending + chunk
- lines = chunk.splitlines()
+
+ if delimiter:
+ lines = chunk.split(delimiter)
+ else:
+ lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
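
The optional delimiter lets iter_lines split the stream on a caller-chosen boundary instead of universal newlines, which suits feeds that separate records with something other than a single newline. A sketch (the double-newline boundary is illustrative; pass bytes unless decode_unicode is set):

    import requests

    r = requests.get('http://httpbin.org/stream/5', stream=True)
    for line in r.iter_lines():            # default: splitlines()
        if line:
            print(line)

    r = requests.get('http://httpbin.org/stream/5', stream=True)
    for record in r.iter_lines(delimiter=b'\n\n'):   # custom boundary
        if record:
            print(record)
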
@@ -793,8 +831,8 @@ class Response(object):
raise HTTPError(http_error_msg, response=self)
def close(self):
- """Closes the underlying file descriptor and releases the connection
- back to the pool.
+ """Releases the connection back to the pool. Once this method has been
+ called the underlying ``raw`` object must not be accessed again.
*Note: Should not normally need to be called explicitly.*
"""
diff --git a/lib/requests/packages/__init__.py b/lib/requests/packages/__init__.py
index d62c4b71..ec6a9e06 100644
--- a/lib/requests/packages/__init__.py
+++ b/lib/requests/packages/__init__.py
@@ -1,3 +1,95 @@
+"""
+Copyright (c) Donald Stufft, pip, and individual contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
from __future__ import absolute_import
-from . import urllib3
+import sys
+
+
+class VendorAlias(object):
+
+ def __init__(self):
+ self._vendor_name = __name__
+ self._vendor_pkg = self._vendor_name + "."
+
+ def find_module(self, fullname, path=None):
+ if fullname.startswith(self._vendor_pkg):
+ return self
+
+ def load_module(self, name):
+ # Ensure that this only works for the vendored name
+ if not name.startswith(self._vendor_pkg):
+ raise ImportError(
+ "Cannot import %s, must be a subpackage of '%s'." % (
+ name, self._vendor_name,
+ )
+ )
+
+ # Check to see if we already have this item in sys.modules, if we do
+ # then simply return that.
+ if name in sys.modules:
+ return sys.modules[name]
+
+ # Check to see if we can import the vendor name
+ try:
+ # We do this dance here because we want to try and import this
+ # module without hitting a recursion error because of a bunch of
+ # VendorAlias instances on sys.meta_path
+ real_meta_path = sys.meta_path[:]
+ try:
+ sys.meta_path = [
+ m for m in sys.meta_path
+ if not isinstance(m, VendorAlias)
+ ]
+ __import__(name)
+ module = sys.modules[name]
+ finally:
+ # Re-add any additions to sys.meta_path that were made while
+ # during the import we just did, otherwise things like
+ # requests.packages.urllib3.poolmanager will fail.
+ for m in sys.meta_path:
+ if m not in real_meta_path:
+ real_meta_path.append(m)
+
+ # Restore sys.meta_path with any new items.
+ sys.meta_path = real_meta_path
+ except ImportError:
+ # We can't import the vendor name, so we'll try to import the
+ # "real" name.
+ real_name = name[len(self._vendor_pkg):]
+ try:
+ __import__(real_name)
+ module = sys.modules[real_name]
+ except ImportError:
+ raise ImportError("No module named '%s'" % (name,))
+
+ # If we've gotten here we've found the module we're looking for, either
+ # as part of our vendored package, or as the real name, so we'll add
+ # it to sys.modules as the vendored name so that we don't have to do
+ # the lookup again.
+ sys.modules[name] = module
+
+ # Finally, return the loaded module
+ return module
+
+
+sys.meta_path.append(VendorAlias())
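
VendorAlias is an import hook: appended to sys.meta_path, it resolves any requests.packages.* name to the bundled copy when one exists and otherwise transparently loads the top-level package of the same name, registering it under the vendored name either way. The observable effect:

    import sys
    import requests.packages.urllib3  # resolved through VendorAlias

    # load_module() cached the result, so later imports are plain
    # sys.modules lookups under the vendored name
    print('requests.packages.urllib3' in sys.modules)   # True

    # had no bundled copy been present, the same import would have
    # loaded a system-wide urllib3 and aliased it to this name
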
diff --git a/lib/requests/packages/chardet/__init__.py b/lib/requests/packages/chardet/__init__.py
index e4f0799d..82c2a48d 100644
--- a/lib/requests/packages/chardet/__init__.py
+++ b/lib/requests/packages/chardet/__init__.py
@@ -15,7 +15,7 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-__version__ = "2.2.1"
+__version__ = "2.3.0"
from sys import version_info
diff --git a/lib/requests/packages/chardet/chardetect.py b/lib/requests/packages/chardet/chardetect.py
old mode 100644
new mode 100755
index ecd0163b..ffe892f2
--- a/lib/requests/packages/chardet/chardetect.py
+++ b/lib/requests/packages/chardet/chardetect.py
@@ -12,34 +12,68 @@ Example::
If no paths are provided, it takes its input from stdin.
"""
-from io import open
-from sys import argv, stdin
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import sys
+from io import open
+
+from chardet import __version__
from chardet.universaldetector import UniversalDetector
-def description_of(file, name='stdin'):
- """Return a string describing the probable encoding of a file."""
+def description_of(lines, name='stdin'):
+ """
+ Return a string describing the probable encoding of a file or
+ list of strings.
+
+ :param lines: The lines to get the encoding of.
+ :type lines: Iterable of bytes
+ :param name: Name of file or collection of lines
+ :type name: str
+ """
u = UniversalDetector()
- for line in file:
+ for line in lines:
u.feed(line)
u.close()
result = u.result
if result['encoding']:
- return '%s: %s with confidence %s' % (name,
- result['encoding'],
- result['confidence'])
+ return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
+ result['confidence'])
else:
- return '%s: no result' % name
+ return '{0}: no result'.format(name)
-def main():
- if len(argv) <= 1:
- print(description_of(stdin))
- else:
- for path in argv[1:]:
- with open(path, 'rb') as f:
- print(description_of(f, path))
+def main(argv=None):
+ '''
+ Handles command line arguments and gets things started.
+
+ :param argv: List of arguments, as if specified on the command-line.
+ If None, ``sys.argv[1:]`` is used instead.
+ :type argv: list of str
+ '''
+ # Get command line arguments
+ parser = argparse.ArgumentParser(
+ description="Takes one or more file paths and reports their detected \
+ encodings",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ conflict_handler='resolve')
+ parser.add_argument('input',
+ help='File whose encoding we would like to determine.',
+ type=argparse.FileType('rb'), nargs='*',
+ default=[sys.stdin])
+ parser.add_argument('--version', action='version',
+ version='%(prog)s {0}'.format(__version__))
+ args = parser.parse_args(argv)
+
+ for f in args.input:
+ if f.isatty():
+ print("You are running chardetect interactively. Press " +
+ "CTRL-D twice at the start of a blank line to signal the " +
+ "end of your input. If you want help, run chardetect " +
+ "--help\n", file=sys.stderr)
+ print(description_of(f, f.name))
if __name__ == '__main__':
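
description_of now takes any iterable of byte strings rather than an open file, so it is usable from code as well as the CLI. A quick check, assuming the chardet package is importable on its own (in this repo it lives under lib/requests/packages):

    from chardet.chardetect import description_of

    # a UTF-8 BOM followed by ASCII text; given the UTF-8-SIG change
    # further down, this should report full confidence
    print(description_of([b'\xef\xbb\xbfhello world'], name='example'))
    # -> example: UTF-8-SIG with confidence 1.0
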
diff --git a/lib/requests/packages/chardet/jpcntx.py b/lib/requests/packages/chardet/jpcntx.py
index f7f69ba4..59aeb6a8 100644
--- a/lib/requests/packages/chardet/jpcntx.py
+++ b/lib/requests/packages/chardet/jpcntx.py
@@ -177,6 +177,12 @@ class JapaneseContextAnalysis:
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
+ def __init__(self):
+ self.charset_name = "SHIFT_JIS"
+
+ def get_charset_name(self):
+ return self.charset_name
+
def get_order(self, aBuf):
if not aBuf:
return -1, 1
@@ -184,6 +190,8 @@ class SJISContextAnalysis(JapaneseContextAnalysis):
first_char = wrap_ord(aBuf[0])
if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
charLen = 2
+ if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
+ self.charset_name = "CP932"
else:
charLen = 1
diff --git a/lib/requests/packages/chardet/latin1prober.py b/lib/requests/packages/chardet/latin1prober.py
index ad695f57..eef35735 100644
--- a/lib/requests/packages/chardet/latin1prober.py
+++ b/lib/requests/packages/chardet/latin1prober.py
@@ -129,11 +129,11 @@ class Latin1Prober(CharSetProber):
if total < 0.01:
confidence = 0.0
else:
- confidence = ((self._mFreqCounter[3] / total)
- - (self._mFreqCounter[1] * 20.0 / total))
+ confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
+ / total)
if confidence < 0.0:
confidence = 0.0
# lower the confidence of latin1 so that other more accurate
# detector can take priority.
- confidence = confidence * 0.5
+ confidence = confidence * 0.73
return confidence
diff --git a/lib/requests/packages/chardet/mbcssm.py b/lib/requests/packages/chardet/mbcssm.py
index 3f93cfb0..efe678ca 100644
--- a/lib/requests/packages/chardet/mbcssm.py
+++ b/lib/requests/packages/chardet/mbcssm.py
@@ -353,7 +353,7 @@ SJIS_cls = (
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
- 3,3,3,3,3,3,3,3, # 80 - 87
+ 3,3,3,3,3,2,2,3, # 80 - 87
3,3,3,3,3,3,3,3, # 88 - 8f
3,3,3,3,3,3,3,3, # 90 - 97
3,3,3,3,3,3,3,3, # 98 - 9f
@@ -369,9 +369,8 @@ SJIS_cls = (
2,2,2,2,2,2,2,2, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,4,4,4, # e8 - ef
- 4,4,4,4,4,4,4,4, # f0 - f7
- 4,4,4,4,4,0,0,0 # f8 - ff
-)
+ 3,3,3,3,3,3,3,3, # f0 - f7
+ 3,3,3,3,3,0,0,0) # f8 - ff
SJIS_st = (
@@ -571,5 +570,3 @@ UTF8SMModel = {'classTable': UTF8_cls,
'stateTable': UTF8_st,
'charLenTable': UTF8CharLenTable,
'name': 'UTF-8'}
-
-# flake8: noqa
diff --git a/lib/requests/packages/chardet/sjisprober.py b/lib/requests/packages/chardet/sjisprober.py
index b173614e..cd0e9e70 100644
--- a/lib/requests/packages/chardet/sjisprober.py
+++ b/lib/requests/packages/chardet/sjisprober.py
@@ -47,7 +47,7 @@ class SJISProber(MultiByteCharSetProber):
self._mContextAnalyzer.reset()
def get_charset_name(self):
- return "SHIFT_JIS"
+ return self._mContextAnalyzer.get_charset_name()
def feed(self, aBuf):
aLen = len(aBuf)
diff --git a/lib/requests/packages/chardet/universaldetector.py b/lib/requests/packages/chardet/universaldetector.py
index 9a03ad3d..476522b9 100644
--- a/lib/requests/packages/chardet/universaldetector.py
+++ b/lib/requests/packages/chardet/universaldetector.py
@@ -71,9 +71,9 @@ class UniversalDetector:
if not self._mGotData:
# If the data starts with BOM, we know it is UTF
- if aBuf[:3] == codecs.BOM:
+ if aBuf[:3] == codecs.BOM_UTF8:
# EF BB BF UTF-8 with BOM
- self.result = {'encoding': "UTF-8", 'confidence': 1.0}
+ self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
elif aBuf[:4] == codecs.BOM_UTF32_LE:
# FF FE 00 00 UTF-32, little-endian BOM
self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
diff --git a/lib/requests/packages/urllib3/__init__.py b/lib/requests/packages/urllib3/__init__.py
index 73071f70..d7592ae7 100644
--- a/lib/requests/packages/urllib3/__init__.py
+++ b/lib/requests/packages/urllib3/__init__.py
@@ -1,9 +1,3 @@
-# urllib3/__init__.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
@@ -23,7 +17,10 @@ from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
-from .util import make_headers, get_host, Timeout
+from .util.request import make_headers
+from .util.url import get_host
+from .util.timeout import Timeout
+from .util.retry import Retry
# Set default logging handler to avoid "No handler found" warnings.
@@ -51,8 +48,19 @@ def add_stderr_logger(level=logging.DEBUG):
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logger.addHandler(handler)
logger.setLevel(level)
- logger.debug('Added an stderr logging handler to logger: %s' % __name__)
+ logger.debug('Added a stderr logging handler to logger: %s' % __name__)
return handler
# ... Clean up.
del NullHandler
+
+
+# Set security warning to always go off by default.
+import warnings
+warnings.simplefilter('always', exceptions.SecurityWarning)
+
+def disable_warnings(category=exceptions.HTTPWarning):
+ """
+ Helper for quickly disabling all urllib3 warnings.
+ """
+ warnings.simplefilter('ignore', category)
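
SecurityWarning is now raised on every occurrence (simplefilter 'always'), and disable_warnings is the sanctioned off switch, scoped by warning category and defaulting to all HTTPWarning subclasses. For example (the host is a placeholder for any self-signed endpoint):

    import requests
    from requests.packages import urllib3

    # silence urllib3's HTTPWarning subclasses, e.g. the warning
    # emitted for verify=False requests
    urllib3.disable_warnings()

    requests.get('https://self-signed.example/', verify=False)
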
diff --git a/lib/requests/packages/urllib3/_collections.py b/lib/requests/packages/urllib3/_collections.py
index 5907b0dc..784342a4 100644
--- a/lib/requests/packages/urllib3/_collections.py
+++ b/lib/requests/packages/urllib3/_collections.py
@@ -1,10 +1,4 @@
-# urllib3/_collections.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from collections import MutableMapping
+from collections import Mapping, MutableMapping
try:
from threading import RLock
except ImportError: # Platform-specific: No threads available
@@ -20,9 +14,10 @@ try: # Python 2.7+
from collections import OrderedDict
except ImportError:
from .packages.ordered_dict import OrderedDict
+from .packages.six import iterkeys, itervalues
-__all__ = ['RecentlyUsedContainer']
+__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
_Null = object()
@@ -90,8 +85,7 @@ class RecentlyUsedContainer(MutableMapping):
def clear(self):
with self.lock:
# Copy pointers to all values, then wipe the mapping
- # under Python 2, this copies the list of values twice :-|
- values = list(self._container.values())
+ values = list(itervalues(self._container))
self._container.clear()
if self.dispose_func:
@@ -100,4 +94,105 @@ class RecentlyUsedContainer(MutableMapping):
def keys(self):
with self.lock:
- return self._container.keys()
+ return list(iterkeys(self._container))
+
+
+class HTTPHeaderDict(MutableMapping):
+ """
+ :param headers:
+ An iterable of field-value pairs. Must not contain multiple field names
+ when compared case-insensitively.
+
+ :param kwargs:
+ Additional field-value pairs to pass in to ``dict.update``.
+
+ A ``dict`` like container for storing HTTP Headers.
+
+ Field names are stored and compared case-insensitively in compliance with
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
+ case-insensitive pair.
+
+ Using ``__setitem__`` syntax overwrites fields that compare equal
+ case-insensitively in order to maintain ``dict``'s api. For fields that
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+ in a loop.
+
+ If multiple fields that are equal case-insensitively are passed to the
+ constructor or ``.update``, the behavior is undefined and some will be
+ lost.
+
+ >>> headers = HTTPHeaderDict()
+ >>> headers.add('Set-Cookie', 'foo=bar')
+ >>> headers.add('set-cookie', 'baz=quxx')
+ >>> headers['content-length'] = '7'
+ >>> headers['SET-cookie']
+ 'foo=bar, baz=quxx'
+ >>> headers['Content-Length']
+ '7'
+
+ If you want to access the raw headers with their original casing
+ for debugging purposes you can access the private ``._data`` attribute
+ which is a normal python ``dict`` that maps the case-insensitive key to a
+ list of tuples stored as (case-sensitive-original-name, value). Using the
+ structure from above as our example:
+
+ >>> headers._data
+ {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')],
+ 'content-length': [('content-length', '7')]}
+ """
+
+ def __init__(self, headers=None, **kwargs):
+ self._data = {}
+ if headers is None:
+ headers = {}
+ self.update(headers, **kwargs)
+
+ def add(self, key, value):
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
+ exists.
+
+ >>> headers = HTTPHeaderDict(foo='bar')
+ >>> headers.add('Foo', 'baz')
+ >>> headers['foo']
+ 'bar, baz'
+ """
+ self._data.setdefault(key.lower(), []).append((key, value))
+
+ def getlist(self, key):
+ """Returns a list of all the values for the named field. Returns an
+ empty list if the key doesn't exist."""
+ return self[key].split(', ') if key in self else []
+
+ def copy(self):
+ h = HTTPHeaderDict()
+ for key in self._data:
+ for rawkey, value in self._data[key]:
+ h.add(rawkey, value)
+ return h
+
+ def __eq__(self, other):
+ if not isinstance(other, Mapping):
+ return False
+ other = HTTPHeaderDict(other)
+ return dict((k1, self[k1]) for k1 in self._data) == \
+ dict((k2, other[k2]) for k2 in other._data)
+
+ def __getitem__(self, key):
+ values = self._data[key.lower()]
+ return ', '.join(value[1] for value in values)
+
+ def __setitem__(self, key, value):
+ self._data[key.lower()] = [(key, value)]
+
+ def __delitem__(self, key):
+ del self._data[key.lower()]
+
+ def __len__(self):
+ return len(self._data)
+
+ def __iter__(self):
+ for headers in itervalues(self._data):
+ yield headers[0][0]
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
diff --git a/lib/requests/packages/urllib3/connection.py b/lib/requests/packages/urllib3/connection.py
index c7d5b77d..e5de769d 100644
--- a/lib/requests/packages/urllib3/connection.py
+++ b/lib/requests/packages/urllib3/connection.py
@@ -1,88 +1,155 @@
-# urllib3/connection.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
+import datetime
+import sys
import socket
from socket import timeout as SocketTimeout
+import warnings
+from .packages import six
-try: # Python 3
+try: # Python 3
from http.client import HTTPConnection as _HTTPConnection, HTTPException
except ImportError:
from httplib import HTTPConnection as _HTTPConnection, HTTPException
+
class DummyConnection(object):
"Used to detect a failed ConnectionCls import."
pass
-try: # Compiled with SSL?
- ssl = None
+
+try: # Compiled with SSL?
HTTPSConnection = DummyConnection
+ import ssl
+ BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError): # Platform-specific: No SSL.
+ ssl = None
class BaseSSLError(BaseException):
pass
- try: # Python 3
- from http.client import HTTPSConnection as _HTTPSConnection
- except ImportError:
- from httplib import HTTPSConnection as _HTTPSConnection
- import ssl
- BaseSSLError = ssl.SSLError
+try: # Python 3:
+ # Not a no-op, we're adding this to the namespace so it can be imported.
+ ConnectionError = ConnectionError
+except NameError: # Python 2:
+ class ConnectionError(Exception):
+ pass
-except (ImportError, AttributeError): # Platform-specific: No SSL.
- pass
from .exceptions import (
ConnectTimeoutError,
+ SystemTimeWarning,
+ SecurityWarning,
)
from .packages.ssl_match_hostname import match_hostname
-from .util import (
- assert_fingerprint,
+
+from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
+ assert_fingerprint,
)
+from .util import connection
+
port_by_scheme = {
'http': 80,
'https': 443,
}
+RECENT_DATE = datetime.date(2014, 1, 1)
+
class HTTPConnection(_HTTPConnection, object):
+ """
+ Based on httplib.HTTPConnection but provides an extra constructor
+ backwards-compatibility layer between older and newer Pythons.
+
+ Additional keyword parameters are used to configure attributes of the connection.
+ Accepted parameters include:
+
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+ - ``source_address``: Set the source address for the current connection.
+
+ .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
+
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass::
+
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ """
+
default_port = port_by_scheme['http']
- # By default, disable Nagle's Algorithm.
- tcp_nodelay = 1
+ #: Disable Nagle's algorithm by default.
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+
+ #: Whether this connection verifies the host's certificate.
+ is_verified = False
+
+ def __init__(self, *args, **kw):
+ if six.PY3: # Python 3
+ kw.pop('strict', None)
+
+ # Pre-set source_address in case we have an older Python like 2.6.
+ self.source_address = kw.get('source_address')
+
+ if sys.version_info < (2, 7): # Python 2.6
+ # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
+ # not newer versions. We can still use it when creating a
+ # connection though, so we pop it *after* we have saved it as
+ # self.source_address.
+ kw.pop('source_address', None)
+
+ #: The socket options provided by the user. If no options are
+ #: provided, we use the default options.
+ self.socket_options = kw.pop('socket_options', self.default_socket_options)
+
+ # Superclass also sets self.source_address in Python 2.7+.
+ _HTTPConnection.__init__(self, *args, **kw)
def _new_conn(self):
- """ Establish a socket connection and set nodelay settings on it
+        """ Establish a socket connection and apply the configured socket options.
- :return: a new socket connection
+ :return: New socket connection.
"""
+ extra_kw = {}
+ if self.source_address:
+ extra_kw['source_address'] = self.source_address
+
+ if self.socket_options:
+ extra_kw['socket_options'] = self.socket_options
+
try:
- conn = socket.create_connection(
- (self.host, self.port),
- self.timeout,
- self.source_address,
- )
- except AttributeError: # Python 2.6
- conn = socket.create_connection(
- (self.host, self.port),
- self.timeout,
- )
- conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,
- self.tcp_nodelay)
+ conn = connection.create_connection(
+ (self.host, self.port), self.timeout, **extra_kw)
+
+ except SocketTimeout:
+ raise ConnectTimeoutError(
+ self, "Connection to %s timed out. (connect timeout=%s)" %
+ (self.host, self.timeout))
+
return conn
def _prepare_conn(self, conn):
self.sock = conn
+        # The _tunnel_host attribute was added in Python 2.6.3 (via
+        # http://hg.python.org/cpython/rev/0f57b30a152f), so Pythons
+        # 2.6.0-2.6.2 do not have it.
if getattr(self, '_tunnel_host', None):
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
def connect(self):
conn = self._new_conn()
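A concrete illustration of the ``socket_options`` keyword documented above
(a sketch; the host name is arbitrary). The pool forwards the option list to
every new connection via ``**conn_kw``::

    import socket
    from urllib3 import HTTPConnectionPool
    from urllib3.connection import HTTPConnection

    # Keep the default TCP_NODELAY option and additionally enable keep-alive.
    options = HTTPConnection.default_socket_options + [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
    ]
    pool = HTTPConnectionPool('example.com', socket_options=options)

Passing ``socket_options=[]`` instead would disable the defaults entirely.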
@@ -93,15 +160,18 @@ class HTTPSConnection(HTTPConnection):
default_port = port_by_scheme['https']
def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
- source_address=None):
- try:
- HTTPConnection.__init__(self, host, port, strict, timeout, source_address)
- except TypeError: # Python 2.6
- HTTPConnection.__init__(self, host, port, strict, timeout)
+ strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
+
+ HTTPConnection.__init__(self, host, port, strict=strict,
+ timeout=timeout, **kw)
+
self.key_file = key_file
self.cert_file = cert_file
+ # Required property for Google AppEngine 1.9.0 which otherwise causes
+ # HTTPS requests to go out as HTTP. (See Issue #356)
+ self._protocol = 'https'
+
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
@@ -116,6 +186,7 @@ class VerifiedHTTPSConnection(HTTPSConnection):
cert_reqs = None
ca_certs = None
ssl_version = None
+ assert_fingerprint = None
def set_cert(self, key_file=None, cert_file=None,
cert_reqs=None, ca_certs=None,
@@ -130,46 +201,59 @@ class VerifiedHTTPSConnection(HTTPSConnection):
def connect(self):
# Add certificate verification
- try:
- sock = socket.create_connection(
- address=(self.host, self.port),
- timeout=self.timeout,
- )
- except SocketTimeout:
- raise ConnectTimeoutError(
- self, "Connection to %s timed out. (connect timeout=%s)" %
- (self.host, self.timeout))
-
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,
- self.tcp_nodelay)
+ conn = self._new_conn()
resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
resolved_ssl_version = resolve_ssl_version(self.ssl_version)
- # the _tunnel_host attribute was added in python 2.6.3 (via
- # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
- # not have them.
+ hostname = self.host
if getattr(self, '_tunnel_host', None):
- self.sock = sock
+ # _tunnel_host was added in Python 2.6.3
+ # (See: http://hg.python.org/cpython/rev/0f57b30a152f)
+
+ self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ # Override the host with the one we're requesting data from.
+ hostname = self._tunnel_host
+
+ is_time_off = datetime.date.today() < RECENT_DATE
+ if is_time_off:
+ warnings.warn((
+ 'System time is way off (before {0}). This will probably '
+ 'lead to SSL verification errors').format(RECENT_DATE),
+ SystemTimeWarning
+ )
# Wrap socket using verification with the root certs in
# trusted_root_certs
- self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file,
+ self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
- server_hostname=self.host,
+ server_hostname=hostname,
ssl_version=resolved_ssl_version)
- if resolved_cert_reqs != ssl.CERT_NONE:
- if self.assert_fingerprint:
- assert_fingerprint(self.sock.getpeercert(binary_form=True),
- self.assert_fingerprint)
- elif self.assert_hostname is not False:
- match_hostname(self.sock.getpeercert(),
- self.assert_hostname or self.host)
+ if self.assert_fingerprint:
+ assert_fingerprint(self.sock.getpeercert(binary_form=True),
+ self.assert_fingerprint)
+ elif resolved_cert_reqs != ssl.CERT_NONE \
+ and self.assert_hostname is not False:
+ cert = self.sock.getpeercert()
+ if not cert.get('subjectAltName', ()):
+ warnings.warn((
+ 'Certificate has no `subjectAltName`, falling back to check for a `commonName` for now. '
+ 'This feature is being removed by major browsers and deprecated by RFC 2818. '
+ '(See https://github.com/shazow/urllib3/issues/497 for details.)'),
+ SecurityWarning
+ )
+ match_hostname(cert, self.assert_hostname or hostname)
+
+ self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED
+ or self.assert_fingerprint is not None)
if ssl:
diff --git a/lib/requests/packages/urllib3/connectionpool.py b/lib/requests/packages/urllib3/connectionpool.py
index 243d700e..8bdf228f 100644
--- a/lib/requests/packages/urllib3/connectionpool.py
+++ b/lib/requests/packages/urllib3/connectionpool.py
@@ -1,16 +1,12 @@
-# urllib3/connectionpool.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
import errno
import logging
+import sys
+import warnings
from socket import error as SocketError, timeout as SocketTimeout
import socket
-try: # Python 3
+try: # Python 3
from queue import LifoQueue, Empty, Full
except ImportError:
from Queue import LifoQueue, Empty, Full
@@ -19,14 +15,16 @@ except ImportError:
from .exceptions import (
ClosedPoolError,
- ConnectTimeoutError,
+ ProtocolError,
EmptyPoolError,
HostChangedError,
+ LocationValueError,
MaxRetryError,
+ ProxyError,
+ ReadTimeoutError,
SSLError,
TimeoutError,
- ReadTimeoutError,
- ProxyError,
+ InsecureRequestWarning,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
@@ -34,16 +32,15 @@ from .connection import (
port_by_scheme,
DummyConnection,
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
- HTTPException, BaseSSLError,
+ HTTPException, BaseSSLError, ConnectionError
)
from .request import RequestMethods
from .response import HTTPResponse
-from .util import (
- assert_fingerprint,
- get_host,
- is_connection_dropped,
- Timeout,
-)
+
+from .util.connection import is_connection_dropped
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host
xrange = six.moves.xrange
@@ -52,8 +49,8 @@ log = logging.getLogger(__name__)
_Default = object()
-## Pool objects
+## Pool objects
class ConnectionPool(object):
"""
Base class for all connection pools, such as
@@ -64,10 +61,11 @@ class ConnectionPool(object):
QueueCls = LifoQueue
def __init__(self, host, port=None):
- # httplib doesn't like it when we include brackets in ipv6 addresses
- host = host.strip('[]')
+ if not host:
+ raise LocationValueError("No host specified.")
- self.host = host
+ # httplib doesn't like it when we include brackets in ipv6 addresses
+ self.host = host.strip('[]')
self.port = port
def __str__(self):
@@ -77,6 +75,7 @@ class ConnectionPool(object):
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
+
class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Thread-safe connection pool for one host.
@@ -121,6 +120,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
Headers to include with all requests, unless other headers are given
explicitly.
+ :param retries:
+ Retry configuration to use by default with requests in this pool.
+
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
         :class:`urllib3.connectionpool.ProxyManager`
@@ -128,6 +130,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
         instead, see :class:`urllib3.connectionpool.ProxyManager`
+
+ :param \**conn_kw:
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+ :class:`urllib3.connection.HTTPSConnection` instances.
"""
scheme = 'http'
@@ -135,18 +141,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
def __init__(self, host, port=None, strict=False,
timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
- headers=None, _proxy=None, _proxy_headers=None):
+ headers=None, retries=None,
+ _proxy=None, _proxy_headers=None,
+ **conn_kw):
ConnectionPool.__init__(self, host, port)
RequestMethods.__init__(self, headers)
self.strict = strict
- # This is for backwards compatibility and can be removed once a timeout
- # can only be set to a Timeout object
if not isinstance(timeout, Timeout):
timeout = Timeout.from_float(timeout)
+ if retries is None:
+ retries = Retry.DEFAULT
+
self.timeout = timeout
+ self.retries = retries
self.pool = self.QueueCls(maxsize)
self.block = block
@@ -161,6 +171,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# These are mostly for testing and debugging purposes.
self.num_connections = 0
self.num_requests = 0
+ self.conn_kw = conn_kw
+
+ if self.proxy:
+ # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+ # We cannot know if the user has added default socket options, so we cannot replace the
+ # list.
+ self.conn_kw.setdefault('socket_options', [])
def _new_conn(self):
"""
@@ -170,17 +187,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
log.info("Starting new HTTP connection (%d): %s" %
(self.num_connections, self.host))
- extra_params = {}
- if not six.PY3: # Python 2
- extra_params['strict'] = self.strict
-
conn = self.ConnectionCls(host=self.host, port=self.port,
timeout=self.timeout.connect_timeout,
- **extra_params)
- if self.proxy is not None:
- # Enable Nagle's algorithm for proxies, to avoid packet
- # fragmentation.
- conn.tcp_nodelay = 0
+ strict=self.strict, **self.conn_kw)
return conn
def _get_conn(self, timeout=None):
@@ -199,7 +208,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
try:
conn = self.pool.get(block=self.block, timeout=timeout)
- except AttributeError: # self.pool is None
+ except AttributeError: # self.pool is None
raise ClosedPoolError(self, "Pool is closed.")
except Empty:
@@ -213,6 +222,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if conn and is_connection_dropped(conn):
log.info("Resetting dropped connection: %s" % self.host)
conn.close()
+ if getattr(conn, 'auto_open', 1) == 0:
+ # This is a proxied connection that has been mutated by
+ # httplib._tunnel() and cannot be reused (since it would
+ # attempt to bypass the proxy)
+ conn = None
return conn or self._new_conn()
@@ -232,19 +246,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
try:
self.pool.put(conn, block=False)
- return # Everything is dandy, done.
+ return # Everything is dandy, done.
except AttributeError:
# self.pool is None.
pass
except Full:
# This should never happen if self.block == True
- log.warning("HttpConnectionPool is full, discarding connection: %s"
- % self.host)
+ log.warning(
+ "Connection pool is full, discarding connection: %s" %
+ self.host)
# Connection never got put back into the pool, close it.
if conn:
conn.close()
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ pass
+
+ def _prepare_proxy(self, conn):
+ # Nothing to do for HTTP connections.
+ pass
+
def _get_timeout(self, timeout):
""" Helper that always returns a :class:`urllib3.util.Timeout` """
if timeout is _Default:
@@ -257,6 +282,23 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# can be removed later
return Timeout.from_float(timeout)
+ def _raise_timeout(self, err, url, timeout_value):
+        """Raise a ReadTimeoutError if the error is really a timeout; otherwise do nothing."""
+
+ if isinstance(err, SocketTimeout):
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # See the above comment about EAGAIN in Python 3. In Python 2 we have
+ # to specifically catch it and throw the timeout error
+ if hasattr(err, 'errno') and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # Catch possible read timeouts thrown as SSL errors. If not the
+ # case, rethrow the original. We need to do this because of:
+ # http://bugs.python.org/issue10272
+ if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
def _make_request(self, conn, method, url, timeout=_Default,
**httplib_request_kw):
"""
@@ -276,23 +318,26 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
self.num_requests += 1
timeout_obj = self._get_timeout(timeout)
+ timeout_obj.start_connect()
+ conn.timeout = timeout_obj.connect_timeout
+ # Trigger any extra validation we need to do.
try:
- timeout_obj.start_connect()
- conn.timeout = timeout_obj.connect_timeout
- # conn.request() calls httplib.*.request, not the method in
- # urllib3.request. It also calls makefile (recv) on the socket.
- conn.request(method, url, **httplib_request_kw)
- except SocketTimeout:
- raise ConnectTimeoutError(
- self, "Connection to %s timed out. (connect timeout=%s)" %
- (self.host, timeout_obj.connect_timeout))
+ self._validate_conn(conn)
+ except (SocketTimeout, BaseSSLError) as e:
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+ raise
+
+ # conn.request() calls httplib.*.request, not the method in
+ # urllib3.request. It also calls makefile (recv) on the socket.
+ conn.request(method, url, **httplib_request_kw)
# Reset the timeout for the recv() on the socket
read_timeout = timeout_obj.read_timeout
# App Engine doesn't have a sock attr
- if hasattr(conn, 'sock'):
+ if getattr(conn, 'sock', None):
# In Python 3 socket.py will catch EAGAIN and return None when you
# try and read into the file pointer created by http.client, which
# instead raises a BadStatusLine exception. Instead of catching
@@ -300,41 +345,20 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# timeouts, check for a zero timeout before making the request.
if read_timeout == 0:
raise ReadTimeoutError(
- self, url,
- "Read timed out. (read timeout=%s)" % read_timeout)
+ self, url, "Read timed out. (read timeout=%s)" % read_timeout)
if read_timeout is Timeout.DEFAULT_TIMEOUT:
conn.sock.settimeout(socket.getdefaulttimeout())
- else: # None or a value
+ else: # None or a value
conn.sock.settimeout(read_timeout)
# Receive the response from the server
try:
- try: # Python 2.7+, use buffering of HTTP responses
+ try: # Python 2.7+, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
- except TypeError: # Python 2.6 and older
+ except TypeError: # Python 2.6 and older
httplib_response = conn.getresponse()
- except SocketTimeout:
- raise ReadTimeoutError(
- self, url, "Read timed out. (read timeout=%s)" % read_timeout)
-
- except BaseSSLError as e:
- # Catch possible read timeouts thrown as SSL errors. If not the
- # case, rethrow the original. We need to do this because of:
- # http://bugs.python.org/issue10272
- if 'timed out' in str(e) or \
- 'did not complete (read)' in str(e): # Python 2.6
- raise ReadTimeoutError(self, url, "Read timed out.")
-
- raise
-
- except SocketError as e: # Platform-specific: Python 2
- # See the above comment about EAGAIN in Python 3. In Python 2 we
- # have to specifically catch it and throw the timeout error
- if e.errno in _blocking_errnos:
- raise ReadTimeoutError(
- self, url,
- "Read timed out. (read timeout=%s)" % read_timeout)
-
+ except (SocketTimeout, BaseSSLError, SocketError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
# AppEngine doesn't have a version attr.
@@ -358,7 +382,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn.close()
except Empty:
- pass # Done.
+ pass # Done.
def is_same_host(self, url):
"""
@@ -379,7 +403,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
return (scheme, host, port) == (self.scheme, self.host, self.port)
- def urlopen(self, method, url, body=None, headers=None, retries=3,
+ def urlopen(self, method, url, body=None, headers=None, retries=None,
redirect=True, assert_same_host=True, timeout=_Default,
pool_timeout=None, release_conn=None, **response_kw):
"""
@@ -413,11 +437,25 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
these headers completely replace any pool-specific headers.
:param retries:
- Number of retries to allow before raising a MaxRetryError exception.
+ Configure the number of retries to allow before raising a
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+ Pass ``None`` to retry until you receive a response. Pass a
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
+ over different types of retries.
+ Pass an integer number to retry connection errors that many times,
+ but no other types of errors. Pass zero to never retry.
+
+ If ``False``, then retries are disabled and any exception is raised
+ immediately. Also, instead of raising a MaxRetryError on redirects,
+ the redirect response will be returned.
+
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
- 303, 307, 308). Each redirect counts as a retry.
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
+ will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
@@ -451,15 +489,15 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if headers is None:
headers = self.headers
- if retries < 0:
- raise MaxRetryError(self, url)
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get('preload_content', True)
# Check host
if assert_same_host and not self.is_same_host(url):
- raise HostChangedError(self, url, retries - 1)
+ raise HostChangedError(self, url, retries)
conn = None
@@ -470,13 +508,24 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
headers = headers.copy()
headers.update(self.proxy_headers)
+ # Must keep the exception bound to a separate variable or else Python 3
+ # complains about UnboundLocalError.
+ err = None
+
try:
- # Request a connection from the queue
+ # Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
- # Make the request on the httplib connection object
+ conn.timeout = timeout_obj.connect_timeout
+
+ is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
+ if is_new_proxy_conn:
+ self._prepare_proxy(conn)
+
+ # Make the request on the httplib connection object.
httplib_response = self._make_request(conn, method, url,
- timeout=timeout,
+ timeout=timeout_obj,
body=body, headers=headers)
# If we're going to release the connection in ``finally:``, then
@@ -497,38 +546,38 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# ``response.read()``)
except Empty:
- # Timed out by queue
+ # Timed out by queue.
raise EmptyPoolError(self, "No pool connections are available.")
- except BaseSSLError as e:
+ except (BaseSSLError, CertificateError) as e:
+ # Close the connection. If a connection is reused on which there
+ # was a Certificate error, the next request will certainly raise
+ # another Certificate error.
+ if conn:
+ conn.close()
+ conn = None
raise SSLError(e)
- except CertificateError as e:
- # Name mismatch
- raise SSLError(e)
+ except (TimeoutError, HTTPException, SocketError, ConnectionError) as e:
+ if conn:
+ # Discard the connection for these exceptions. It will be
+                # replaced during the next _get_conn() call.
+ conn.close()
+ conn = None
- except TimeoutError as e:
- # Connection broken, discard.
- conn = None
- # Save the error off for retry logic.
+ stacktrace = sys.exc_info()[2]
+ if isinstance(e, SocketError) and self.proxy:
+ e = ProxyError('Cannot connect to proxy.', e)
+ elif isinstance(e, (SocketError, HTTPException)):
+ e = ProtocolError('Connection aborted.', e)
+
+ retries = retries.increment(method, url, error=e,
+ _pool=self, _stacktrace=stacktrace)
+ retries.sleep()
+
+ # Keep track of the error for the retry warning.
err = e
- if retries == 0:
- raise
-
- except (HTTPException, SocketError) as e:
- # Connection broken, discard. It will be replaced next _get_conn().
- conn = None
- # This is necessary so we can access e below
- err = e
-
- if retries == 0:
- if isinstance(e, SocketError) and self.proxy is not None:
- raise ProxyError('Cannot connect to proxy. '
- 'Socket error: %s.' % e)
- else:
- raise MaxRetryError(self, url, e)
-
finally:
if release_conn:
# Put the connection back to be reused. If the connection is
@@ -538,9 +587,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if not conn:
# Try again
- log.warn("Retrying (%d attempts remain) after connection "
- "broken by '%r': %s" % (retries, err, url))
- return self.urlopen(method, url, body, headers, retries - 1,
+ log.warning("Retrying (%r) after connection "
+ "broken by '%r': %s" % (retries, err, url))
+ return self.urlopen(method, url, body, headers, retries,
redirect, assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
@@ -550,11 +599,31 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if redirect_location:
if response.status == 303:
method = 'GET'
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ raise
+ return response
+
log.info("Redirecting %s -> %s" % (url, redirect_location))
return self.urlopen(method, redirect_location, body, headers,
- retries - 1, redirect, assert_same_host,
- timeout=timeout, pool_timeout=pool_timeout,
- release_conn=release_conn, **response_kw)
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
+
+ # Check if we should retry the HTTP response.
+ if retries.is_forced_retry(method, status_code=response.status):
+ retries = retries.increment(method, url, response=response, _pool=self)
+ retries.sleep()
+ log.info("Forced retry: %s" % url)
+ return self.urlopen(method, url, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
return response
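A sketch of the retry semantics described in the ``urlopen`` docstring above
(host, counts and backoff are illustrative; ``Retry`` is the class this patch
wires in from ``urllib3.util.retry``)::

    from urllib3 import HTTPConnectionPool
    from urllib3.util.retry import Retry

    pool = HTTPConnectionPool('example.com')

    # A Retry object gives fine-grained control: at most 3 attempts in
    # total, following at most 2 redirects, with exponential backoff.
    retries = Retry(total=3, redirect=2, backoff_factor=0.5)
    response = pool.urlopen('GET', '/', retries=retries)

    # retries=False disables retries entirely and returns redirect
    # responses as-is instead of raising MaxRetryError.
    response = pool.urlopen('GET', '/', retries=False)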
@@ -581,15 +650,17 @@ class HTTPSConnectionPool(HTTPConnectionPool):
ConnectionCls = HTTPSConnection
def __init__(self, host, port=None,
- strict=False, timeout=None, maxsize=1,
- block=False, headers=None,
+ strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
+ block=False, headers=None, retries=None,
_proxy=None, _proxy_headers=None,
key_file=None, cert_file=None, cert_reqs=None,
ca_certs=None, ssl_version=None,
- assert_hostname=None, assert_fingerprint=None):
+ assert_hostname=None, assert_fingerprint=None,
+ **conn_kw):
HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
- block, headers, _proxy, _proxy_headers)
+ block, headers, retries, _proxy, _proxy_headers,
+ **conn_kw)
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
@@ -613,19 +684,26 @@ class HTTPSConnectionPool(HTTPConnectionPool):
assert_fingerprint=self.assert_fingerprint)
conn.ssl_version = self.ssl_version
- if self.proxy is not None:
- # Python 2.7+
- try:
- set_tunnel = conn.set_tunnel
- except AttributeError: # Platform-specific: Python 2.6
- set_tunnel = conn._set_tunnel
- set_tunnel(self.host, self.port, self.proxy_headers)
- # Establish tunnel connection early, because otherwise httplib
- # would improperly set Host: header to proxy's IP:port.
- conn.connect()
-
return conn
+ def _prepare_proxy(self, conn):
+ """
+        Establish a tunnel connection early, because otherwise httplib
+        would improperly set the Host: header to the proxy's IP:port.
+ """
+ # Python 2.7+
+ try:
+ set_tunnel = conn.set_tunnel
+ except AttributeError: # Platform-specific: Python 2.6
+ set_tunnel = conn._set_tunnel
+
+ if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
+ set_tunnel(self.host, self.port)
+ else:
+ set_tunnel(self.host, self.port, self.proxy_headers)
+
+ conn.connect()
+
def _new_conn(self):
"""
Return a fresh :class:`httplib.HTTPSConnection`.
@@ -645,20 +723,29 @@ class HTTPSConnectionPool(HTTPConnectionPool):
actual_host = self.proxy.host
actual_port = self.proxy.port
- extra_params = {}
- if not six.PY3: # Python 2
- extra_params['strict'] = self.strict
-
conn = self.ConnectionCls(host=actual_host, port=actual_port,
timeout=self.timeout.connect_timeout,
- **extra_params)
- if self.proxy is not None:
- # Enable Nagle's algorithm for proxies, to avoid packet
- # fragmentation.
- conn.tcp_nodelay = 0
+ strict=self.strict, **self.conn_kw)
return self._prepare_conn(conn)
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ super(HTTPSConnectionPool, self)._validate_conn(conn)
+
+ # Force connect early to allow us to validate the connection.
+ if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
+ conn.connect()
+
+ if not conn.is_verified:
+ warnings.warn((
+ 'Unverified HTTPS request is being made. '
+ 'Adding certificate verification is strongly advised. See: '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'),
+ InsecureRequestWarning)
+
def connection_from_url(url, **kw):
"""
@@ -675,7 +762,7 @@ def connection_from_url(url, **kw):
:class:`.ConnectionPool`. Useful for specifying things like
timeout, maxsize, headers, etc.
- Example: ::
+ Example::
>>> conn = connection_from_url('http://google.com/')
>>> r = conn.request('GET', '/')
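With the new ``_validate_conn`` hook above, an unverified HTTPS pool now emits
``InsecureRequestWarning`` on every request. A sketch of both sides (the CA
bundle path is illustrative)::

    import warnings
    from urllib3 import HTTPSConnectionPool
    from urllib3.exceptions import InsecureRequestWarning

    # Verified pool: no warning, and the connection's is_verified is True.
    pool = HTTPSConnectionPool('example.com', cert_reqs='CERT_REQUIRED',
                               ca_certs='/etc/ssl/certs/ca-certificates.crt')

    # Silencing the warning for unverified pools (not recommended):
    warnings.simplefilter('ignore', InsecureRequestWarning)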
diff --git a/lib/requests/packages/urllib3/contrib/ntlmpool.py b/lib/requests/packages/urllib3/contrib/ntlmpool.py
index b8cd9330..c6b266f5 100644
--- a/lib/requests/packages/urllib3/contrib/ntlmpool.py
+++ b/lib/requests/packages/urllib3/contrib/ntlmpool.py
@@ -1,9 +1,3 @@
-# urllib3/contrib/ntlmpool.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
"""
NTLM authenticating pool, contributed by erikcederstran
diff --git a/lib/requests/packages/urllib3/contrib/pyopenssl.py b/lib/requests/packages/urllib3/contrib/pyopenssl.py
index c3df278b..ee657fb3 100644
--- a/lib/requests/packages/urllib3/contrib/pyopenssl.py
+++ b/lib/requests/packages/urllib3/contrib/pyopenssl.py
@@ -1,4 +1,7 @@
-'''SSL with SNI_-support for Python 2.
+'''SSL with SNI_-support for Python 2. Follow these instructions if you would
+like to verify SSL certificates in Python 2. Note, the default libraries do
+*not* do certificate checking; you need to do additional work to validate
+certificates yourself.
This needs the following packages installed:
@@ -6,9 +9,15 @@ This needs the following packages installed:
* ndg-httpsclient (tested with 0.3.2)
* pyasn1 (tested with 0.1.6)
-To activate it call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3`.
-This can be done in a ``sitecustomize`` module, or at any other time before
-your application begins using ``urllib3``, like this::
+You can install them with the following command:
+
+ pip install pyopenssl ndg-httpsclient pyasn1
+
+To activate certificate checking, call
+:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+before you begin making HTTP requests. This can be done in a ``sitecustomize``
+module, or at any other time before your application begins using ``urllib3``,
+like this::
try:
import urllib3.contrib.pyopenssl
@@ -20,7 +29,7 @@ Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.
Activating this module also has the positive side effect of disabling SSL/TLS
-encryption in Python 2 (see `CRIME attack`_).
+compression in Python 2 (see `CRIME attack`_).
If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
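For example (a minimal sketch; the cipher string is illustrative, not a
recommendation)::

    import urllib3.contrib.pyopenssl

    # The module-level variable is read when each socket is wrapped, so
    # reassigning it before making requests should be sufficient.
    urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST = 'ECDH+AESGCM:!aNULL:!MD5'
    urllib3.contrib.pyopenssl.inject_into_urllib3()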
@@ -29,24 +38,26 @@ Module Variables
----------------
:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
- Default: ``EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM EECDH+ECDSA+SHA256
- EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA EECDH RC4 !aNULL !eNULL !LOW !3DES
- !MD5 !EXP !PSK !SRP !DSS'``
+ Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
+ ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
'''
-from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
-from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
+try:
+ from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
+ from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
+except SyntaxError as e:
+ raise ImportError(e)
+
import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
-from socket import _fileobject
+from socket import _fileobject, timeout
import ssl
import select
-from cStringIO import StringIO
from .. import connection
from .. import util
@@ -74,12 +85,22 @@ _openssl_verify = {
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
-# Default SSL/TLS cipher list.
-# Recommendation by https://community.qualys.com/blogs/securitylabs/2013/08/05/
-# configuring-apache-nginx-and-openssl-for-forward-secrecy
-DEFAULT_SSL_CIPHER_LIST = 'EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM ' + \
- 'EECDH+ECDSA+SHA256 EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA ' + \
- 'EECDH RC4 !aNULL !eNULL !LOW !3DES !MD5 !EXP !PSK !SRP !DSS'
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM over any AES-CBC for better performance and security,
+# - use 3DES as a fallback, which is secure but slow,
+# - disable NULL authentication, MD5 MACs and DSS for security reasons.
+DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
+ "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
+ "!aNULL:!MD5:!DSS"
orig_util_HAS_SNI = util.HAS_SNI
@@ -142,193 +163,73 @@ def get_subj_alt_name(peer_cert):
return dns_name
-class fileobject(_fileobject):
-
- def read(self, size=-1):
- # Use max, disallow tiny reads in a loop as they are very inefficient.
- # We never leave read() with any leftover data from a new recv() call
- # in our internal buffer.
- rbufsize = max(self._rbufsize, self.default_bufsize)
- # Our use of StringIO rather than lists of string objects returned by
- # recv() minimizes memory usage and fragmentation that occurs when
- # rbufsize is large compared to the typical return value of recv().
- buf = self._rbuf
- buf.seek(0, 2) # seek end
- if size < 0:
- # Read until EOF
- self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
- while True:
- try:
- data = self._sock.recv(rbufsize)
- except OpenSSL.SSL.WantReadError:
- continue
- if not data:
- break
- buf.write(data)
- return buf.getvalue()
- else:
- # Read until size bytes or EOF seen, whichever comes first
- buf_len = buf.tell()
- if buf_len >= size:
- # Already have size bytes in our buffer? Extract and return.
- buf.seek(0)
- rv = buf.read(size)
- self._rbuf = StringIO()
- self._rbuf.write(buf.read())
- return rv
-
- self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
- while True:
- left = size - buf_len
- # recv() will malloc the amount of memory given as its
- # parameter even though it often returns much less data
- # than that. The returned data string is short lived
- # as we copy it into a StringIO and free it. This avoids
- # fragmentation issues on many platforms.
- try:
- data = self._sock.recv(left)
- except OpenSSL.SSL.WantReadError:
- continue
- if not data:
- break
- n = len(data)
- if n == size and not buf_len:
- # Shortcut. Avoid buffer data copies when:
- # - We have no data in our buffer.
- # AND
- # - Our call to recv returned exactly the
- # number of bytes we were asked to read.
- return data
- if n == left:
- buf.write(data)
- del data # explicit free
- break
- assert n <= left, "recv(%d) returned %d bytes" % (left, n)
- buf.write(data)
- buf_len += n
- del data # explicit free
- #assert buf_len == buf.tell()
- return buf.getvalue()
-
- def readline(self, size=-1):
- buf = self._rbuf
- buf.seek(0, 2) # seek end
- if buf.tell() > 0:
- # check if we already have it in our buffer
- buf.seek(0)
- bline = buf.readline(size)
- if bline.endswith('\n') or len(bline) == size:
- self._rbuf = StringIO()
- self._rbuf.write(buf.read())
- return bline
- del bline
- if size < 0:
- # Read until \n or EOF, whichever comes first
- if self._rbufsize <= 1:
- # Speed up unbuffered case
- buf.seek(0)
- buffers = [buf.read()]
- self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
- data = None
- recv = self._sock.recv
- while True:
- try:
- while data != "\n":
- data = recv(1)
- if not data:
- break
- buffers.append(data)
- except OpenSSL.SSL.WantReadError:
- continue
- break
- return "".join(buffers)
-
- buf.seek(0, 2) # seek end
- self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
- while True:
- try:
- data = self._sock.recv(self._rbufsize)
- except OpenSSL.SSL.WantReadError:
- continue
- if not data:
- break
- nl = data.find('\n')
- if nl >= 0:
- nl += 1
- buf.write(data[:nl])
- self._rbuf.write(data[nl:])
- del data
- break
- buf.write(data)
- return buf.getvalue()
- else:
- # Read until size bytes or \n or EOF seen, whichever comes first
- buf.seek(0, 2) # seek end
- buf_len = buf.tell()
- if buf_len >= size:
- buf.seek(0)
- rv = buf.read(size)
- self._rbuf = StringIO()
- self._rbuf.write(buf.read())
- return rv
- self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
- while True:
- try:
- data = self._sock.recv(self._rbufsize)
- except OpenSSL.SSL.WantReadError:
- continue
- if not data:
- break
- left = size - buf_len
- # did we just receive a newline?
- nl = data.find('\n', 0, left)
- if nl >= 0:
- nl += 1
- # save the excess data to _rbuf
- self._rbuf.write(data[nl:])
- if buf_len:
- buf.write(data[:nl])
- break
- else:
- # Shortcut. Avoid data copy through buf when returning
- # a substring of our first recv().
- return data[:nl]
- n = len(data)
- if n == size and not buf_len:
- # Shortcut. Avoid data copy through buf when
- # returning exactly all of our first recv().
- return data
- if n >= left:
- buf.write(data[:left])
- self._rbuf.write(data[left:])
- break
- buf.write(data)
- buf_len += n
- #assert buf_len == buf.tell()
- return buf.getvalue()
-
-
class WrappedSocket(object):
- '''API-compatibility wrapper for Python OpenSSL's Connection-class.'''
+ '''API-compatibility wrapper for Python OpenSSL's Connection-class.
- def __init__(self, connection, socket):
+ Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
+    collector of PyPy.
+ '''
+
+ def __init__(self, connection, socket, suppress_ragged_eofs=True):
self.connection = connection
self.socket = socket
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self._makefile_refs = 0
def fileno(self):
return self.socket.fileno()
def makefile(self, mode, bufsize=-1):
- return fileobject(self.connection, mode, bufsize)
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+
+ def recv(self, *args, **kwargs):
+ try:
+ data = self.connection.recv(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
+ return b''
+ else:
+ raise
+ except OpenSSL.SSL.ZeroReturnError as e:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b''
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ rd, wd, ed = select.select(
+ [self.socket], [], [], self.socket.gettimeout())
+ if not rd:
+ raise timeout('The read operation timed out')
+ else:
+ return self.recv(*args, **kwargs)
+ else:
+ return data
def settimeout(self, timeout):
return self.socket.settimeout(timeout)
+ def _send_until_done(self, data):
+ while True:
+ try:
+ return self.connection.send(data)
+ except OpenSSL.SSL.WantWriteError:
+ _, wlist, _ = select.select([], [self.socket], [],
+ self.socket.gettimeout())
+ if not wlist:
+ raise timeout()
+ continue
+
def sendall(self, data):
- return self.connection.sendall(data)
+ while len(data):
+ sent = self._send_until_done(data)
+ data = data[sent:]
def close(self):
- return self.connection.shutdown()
+ if self._makefile_refs < 1:
+ return self.connection.shutdown()
+ else:
+ self._makefile_refs -= 1
def getpeercert(self, binary_form=False):
x509 = self.connection.get_peer_certificate()
@@ -351,6 +252,15 @@ class WrappedSocket(object):
]
}
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
def _verify_callback(cnx, x509, err_no, err_depth, return_code):
return err_no == 0
@@ -361,6 +271,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ssl_version=None):
ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
if certfile:
+ keyfile = keyfile or certfile # Match behaviour of the normal python ssl library
ctx.use_certificate_file(certfile)
if keyfile:
ctx.use_privatekey_file(keyfile)
@@ -371,6 +282,8 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ctx.load_verify_locations(ca_certs, None)
except OpenSSL.SSL.Error as e:
raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
+ else:
+ ctx.set_default_verify_paths()
    # Disable TLS compression to mitigate CRIME attack (issue #309)
OP_NO_COMPRESSION = 0x20000
diff --git a/lib/requests/packages/urllib3/exceptions.py b/lib/requests/packages/urllib3/exceptions.py
index 98ef9abc..0c6fd3c5 100644
--- a/lib/requests/packages/urllib3/exceptions.py
+++ b/lib/requests/packages/urllib3/exceptions.py
@@ -1,9 +1,3 @@
-# urllib3/exceptions.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
## Base Exceptions
@@ -11,6 +5,11 @@ class HTTPError(Exception):
"Base exception used by this module."
pass
+class HTTPWarning(Warning):
+ "Base warning used by this module."
+ pass
+
+
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
@@ -49,19 +48,32 @@ class DecodeError(HTTPError):
pass
+class ProtocolError(HTTPError):
+ "Raised when something unexpected happens mid-request/response."
+ pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
## Leaf Exceptions
class MaxRetryError(RequestError):
- "Raised when the maximum number of retries is exceeded."
+ """Raised when the maximum number of retries is exceeded.
+
+ :param pool: The connection pool
+ :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+ :param exceptions.Exception reason: The underlying error
+
+ """
def __init__(self, pool, url, reason=None):
self.reason = reason
- message = "Max retries exceeded with url: %s" % url
- if reason:
- message += " (Caused by %s: %s)" % (type(reason), reason)
- else:
- message += " (Caused by redirect)"
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (
+ url, reason)
RequestError.__init__(self, pool, url, message)
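A sketch of catching the exception and inspecting the ``reason`` attribute
documented above (the URL is deliberately unresolvable)::

    from urllib3 import PoolManager
    from urllib3.exceptions import MaxRetryError

    http = PoolManager()
    try:
        http.request('GET', 'http://nonexistent.invalid/', retries=2)
    except MaxRetryError as e:
        print(e.url)     # the requested URL
        print(e.reason)  # the underlying error, e.g. a ProtocolError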
@@ -111,7 +123,12 @@ class ClosedPoolError(PoolError):
pass
-class LocationParseError(ValueError, HTTPError):
+class LocationValueError(ValueError, HTTPError):
+ "Raised when there is something wrong with a given URL input."
+ pass
+
+
+class LocationParseError(LocationValueError):
"Raised when get_host or similar fails to parse the URL input."
def __init__(self, location):
@@ -119,3 +136,24 @@ class LocationParseError(ValueError, HTTPError):
HTTPError.__init__(self, message)
self.location = location
+
+
+class ResponseError(HTTPError):
+ "Used as a container for an error reason supplied in a MaxRetryError."
+ GENERIC_ERROR = 'too many error responses'
+ SPECIFIC_ERROR = 'too many {status_code} error responses'
+
+
+class SecurityWarning(HTTPWarning):
+    "Warned when performing security-reducing actions"
+ pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+ "Warned when making an unverified HTTPS request."
+ pass
+
+
+class SystemTimeWarning(SecurityWarning):
+ "Warned when system time is suspected to be wrong"
+ pass
diff --git a/lib/requests/packages/urllib3/fields.py b/lib/requests/packages/urllib3/fields.py
index ed017657..c853f8d5 100644
--- a/lib/requests/packages/urllib3/fields.py
+++ b/lib/requests/packages/urllib3/fields.py
@@ -1,9 +1,3 @@
-# urllib3/fields.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
import email.utils
import mimetypes
@@ -15,7 +9,7 @@ def guess_content_type(filename, default='application/octet-stream'):
Guess the "Content-Type" of a file.
:param filename:
- The filename to guess the "Content-Type" of using :mod:`mimetimes`.
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
:param default:
If no "Content-Type" can be guessed, default to `default`.
"""
@@ -78,9 +72,10 @@ class RequestField(object):
"""
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
- Supports constructing :class:`~urllib3.fields.RequestField` from parameter
- of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type)
- tuple where the MIME type is optional. For example: ::
+ Supports constructing :class:`~urllib3.fields.RequestField` from
+ parameter of key/value strings AND key/filetuple. A filetuple is a
+ (filename, data, MIME type) tuple where the MIME type is optional.
+ For example::
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
@@ -125,8 +120,8 @@ class RequestField(object):
'Content-Disposition' fields.
:param header_parts:
- A sequence of (k, v) typles or a :class:`dict` of (k, v) to format as
- `k1="v1"; k2="v2"; ...`.
+        A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+ as `k1="v1"; k2="v2"; ...`.
"""
parts = []
iterable = header_parts
@@ -158,7 +153,8 @@ class RequestField(object):
lines.append('\r\n')
return '\r\n'.join(lines)
- def make_multipart(self, content_disposition=None, content_type=None, content_location=None):
+ def make_multipart(self, content_disposition=None, content_type=None,
+ content_location=None):
"""
Makes this request field into a multipart request field.
@@ -172,6 +168,10 @@ class RequestField(object):
"""
self.headers['Content-Disposition'] = content_disposition or 'form-data'
- self.headers['Content-Disposition'] += '; '.join(['', self._render_parts((('name', self._name), ('filename', self._filename)))])
+ self.headers['Content-Disposition'] += '; '.join([
+ '', self._render_parts(
+ (('name', self._name), ('filename', self._filename))
+ )
+ ])
self.headers['Content-Type'] = content_type
self.headers['Content-Location'] = content_location
diff --git a/lib/requests/packages/urllib3/filepost.py b/lib/requests/packages/urllib3/filepost.py
index e8b30bdd..0fbf488d 100644
--- a/lib/requests/packages/urllib3/filepost.py
+++ b/lib/requests/packages/urllib3/filepost.py
@@ -1,11 +1,4 @@
-# urllib3/filepost.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
import codecs
-import mimetypes
from uuid import uuid4
from io import BytesIO
@@ -38,10 +31,10 @@ def iter_field_objects(fields):
i = iter(fields)
for field in i:
- if isinstance(field, RequestField):
- yield field
- else:
- yield RequestField.from_tuples(*field)
+ if isinstance(field, RequestField):
+ yield field
+ else:
+ yield RequestField.from_tuples(*field)
def iter_fields(fields):
diff --git a/lib/requests/packages/urllib3/packages/ordered_dict.py b/lib/requests/packages/urllib3/packages/ordered_dict.py
index 7f8ee154..4479363c 100644
--- a/lib/requests/packages/urllib3/packages/ordered_dict.py
+++ b/lib/requests/packages/urllib3/packages/ordered_dict.py
@@ -2,7 +2,6 @@
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
-
try:
from thread import get_ident as _get_ident
except ImportError:
diff --git a/lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
index 3aa5b2e1..dd59a75f 100644
--- a/lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
+++ b/lib/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
@@ -7,7 +7,7 @@ except ImportError:
from backports.ssl_match_hostname import CertificateError, match_hostname
except ImportError:
# Our vendored copy
- from _implementation import CertificateError, match_hostname
+ from ._implementation import CertificateError, match_hostname
# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostname')
diff --git a/lib/requests/packages/urllib3/poolmanager.py b/lib/requests/packages/urllib3/poolmanager.py
index f18ff2bb..515dc962 100644
--- a/lib/requests/packages/urllib3/poolmanager.py
+++ b/lib/requests/packages/urllib3/poolmanager.py
@@ -1,9 +1,3 @@
-# urllib3/poolmanager.py
-# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
import logging
try: # Python 3
@@ -14,8 +8,10 @@ except ImportError:
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
+from .exceptions import LocationValueError
from .request import RequestMethods
-from .util import parse_url
+from .util.url import parse_url
+from .util.retry import Retry
__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
@@ -49,7 +45,7 @@ class PoolManager(RequestMethods):
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
- Example: ::
+ Example::
>>> manager = PoolManager(num_pools=2)
>>> r = manager.request('GET', 'http://google.com/')
@@ -102,10 +98,11 @@ class PoolManager(RequestMethods):
``urllib3.connectionpool.port_by_scheme``.
"""
+ if not host:
+ raise LocationValueError("No host specified.")
+
scheme = scheme or 'http'
-
port = port or port_by_scheme.get(scheme, 80)
-
pool_key = (scheme, host, port)
with self.pools.lock:
@@ -118,6 +115,7 @@ class PoolManager(RequestMethods):
# Make a fresh ConnectionPool of the desired type
pool = self._new_pool(scheme, host, port)
self.pools[pool_key] = pool
+
return pool
def connection_from_url(self, url):
@@ -161,13 +159,18 @@ class PoolManager(RequestMethods):
# Support relative URLs for redirecting.
redirect_location = urljoin(url, redirect_location)
- # RFC 2616, Section 10.3.4
+ # RFC 7231, Section 6.4.4
if response.status == 303:
method = 'GET'
- log.info("Redirecting %s -> %s" % (url, redirect_location))
- kw['retries'] = kw.get('retries', 3) - 1 # Persist retries countdown
+ retries = kw.get('retries')
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect)
+
+ kw['retries'] = retries.increment(method, redirect_location)
kw['redirect'] = redirect
+
+ log.info("Redirecting %s -> %s" % (url, redirect_location))
return self.urlopen(method, redirect_location, **kw)
@@ -208,12 +211,16 @@ class ProxyManager(PoolManager):
if not proxy.port:
port = port_by_scheme.get(proxy.scheme, 80)
proxy = proxy._replace(port=port)
+
+ assert proxy.scheme in ("http", "https"), \
+ 'Not supported proxy scheme %s' % proxy.scheme
+
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
- assert self.proxy.scheme in ("http", "https"), \
- 'Not supported proxy scheme %s' % self.proxy.scheme
+
connection_pool_kw['_proxy'] = self.proxy
connection_pool_kw['_proxy_headers'] = self.proxy_headers
+
super(ProxyManager, self).__init__(
num_pools, headers, **connection_pool_kw)
@@ -248,10 +255,10 @@ class ProxyManager(PoolManager):
# For proxied HTTPS requests, httplib sets the necessary headers
# on the CONNECT to the proxy. For HTTP, we'll definitely
# need to set 'Host' at the very least.
- kw['headers'] = self._set_proxy_headers(url, kw.get('headers',
- self.headers))
+ headers = kw.get('headers', self.headers)
+ kw['headers'] = self._set_proxy_headers(url, headers)
- return super(ProxyManager, self).urlopen(method, url, redirect, **kw)
+ return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
def proxy_from_url(url, **kw):
diff --git a/lib/requests/packages/urllib3/request.py b/lib/requests/packages/urllib3/request.py
index 2a92cc20..b08d6c92 100644
--- a/lib/requests/packages/urllib3/request.py
+++ b/lib/requests/packages/urllib3/request.py
@@ -1,9 +1,3 @@
-# urllib3/request.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
try:
from urllib.parse import urlencode
except ImportError:
@@ -26,8 +20,8 @@ class RequestMethods(object):
Specifically,
- :meth:`.request_encode_url` is for sending requests whose fields are encoded
- in the URL (such as GET, HEAD, DELETE).
+ :meth:`.request_encode_url` is for sending requests whose fields are
+ encoded in the URL (such as GET, HEAD, DELETE).
:meth:`.request_encode_body` is for sending requests whose fields are
encoded in the *body* of the request using multipart or www-form-urlencoded
@@ -51,7 +45,7 @@ class RequestMethods(object):
def urlopen(self, method, url, body=None, headers=None,
encode_multipart=True, multipart_boundary=None,
- **kw): # Abstract
+ **kw): # Abstract
        raise NotImplementedError("Classes extending RequestMethods must implement "
"their own ``urlopen`` method.")
@@ -61,8 +55,8 @@ class RequestMethods(object):
``fields`` based on the ``method`` used.
This is a convenience method that requires the least amount of manual
- effort. It can be used in most situations, while still having the option
- to drop down to more specific methods when necessary, such as
+ effort. It can be used in most situations, while still having the
+ option to drop down to more specific methods when necessary, such as
:meth:`request_encode_url`, :meth:`request_encode_body`,
or even the lowest level :meth:`urlopen`.
"""
@@ -70,12 +64,12 @@ class RequestMethods(object):
if method in self._encode_url_methods:
return self.request_encode_url(method, url, fields=fields,
- headers=headers,
- **urlopen_kw)
+ headers=headers,
+ **urlopen_kw)
else:
return self.request_encode_body(method, url, fields=fields,
- headers=headers,
- **urlopen_kw)
+ headers=headers,
+ **urlopen_kw)
def request_encode_url(self, method, url, fields=None, **urlopen_kw):
"""
@@ -94,18 +88,18 @@ class RequestMethods(object):
the body. This is useful for request methods like POST, PUT, PATCH, etc.
When ``encode_multipart=True`` (default), then
- :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the
- payload with the appropriate content type. Otherwise
+ :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
+ the payload with the appropriate content type. Otherwise
:meth:`urllib.urlencode` is used with the
'application/x-www-form-urlencoded' content type.
Multipart encoding must be used when posting files, and it's reasonably
- safe to use it in other times too. However, it may break request signing,
- such as with OAuth.
+ safe to use it in other times too. However, it may break request
+ signing, such as with OAuth.
Supports an optional ``fields`` parameter of key/value strings AND
key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
- the MIME type is optional. For example: ::
+ the MIME type is optional. For example::
fields = {
'foo': 'bar',
@@ -119,23 +113,29 @@ class RequestMethods(object):
When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic the behavior of browsers.
- Note that if ``headers`` are supplied, the 'Content-Type' header will be
- overwritten because it depends on the dynamic random boundary string
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
+ be overwritten because it depends on the dynamic random boundary string
which is used to compose the body of the request. The random boundary
string can be explicitly set with the ``multipart_boundary`` parameter.
"""
- if encode_multipart:
- body, content_type = encode_multipart_formdata(fields or {},
- boundary=multipart_boundary)
- else:
- body, content_type = (urlencode(fields or {}),
- 'application/x-www-form-urlencoded')
-
if headers is None:
headers = self.headers
- headers_ = {'Content-Type': content_type}
- headers_.update(headers)
+ extra_kw = {'headers': {}}
- return self.urlopen(method, url, body=body, headers=headers_,
- **urlopen_kw)
+ if fields:
+ if 'body' in urlopen_kw:
+ raise TypeError('request got values for both \'fields\' and \'body\', can only specify one.')
+
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
+ else:
+ body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
+
+ extra_kw['body'] = body
+ extra_kw['headers'] = {'Content-Type': content_type}
+
+ extra_kw['headers'].update(headers)
+ extra_kw.update(urlopen_kw)
+
+ return self.urlopen(method, url, **extra_kw)
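
A short sketch of the reworked flow above: ``fields`` and ``body`` are now mutually exclusive, and the Content-Type header is only injected when ``fields`` actually triggers encoding. This assumes the vendored package is importable as ``urllib3`` (as upstream), and httpbin.org is purely an illustrative endpoint::

    import urllib3

    http = urllib3.PoolManager()

    # Multipart: a filetuple is (filename, data[, MIME type]).
    r = http.request('POST', 'http://httpbin.org/post',
                     fields={
                         'foo': 'bar',
                         'somefile': ('example.txt', 'hello world', 'text/plain'),
                     })
    print(r.status)

    # Supplying both now fails fast instead of silently overwriting:
    try:
        http.request('POST', 'http://httpbin.org/post',
                     fields={'foo': 'bar'}, body='raw')
    except TypeError as e:
        print(e)
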
diff --git a/lib/requests/packages/urllib3/response.py b/lib/requests/packages/urllib3/response.py
index 6a1fe1a7..e69de957 100644
--- a/lib/requests/packages/urllib3/response.py
+++ b/lib/requests/packages/urllib3/response.py
@@ -1,21 +1,14 @@
-# urllib3/response.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-import logging
import zlib
import io
+from socket import timeout as SocketTimeout
-from .exceptions import DecodeError
+from ._collections import HTTPHeaderDict
+from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
from .packages.six import string_types as basestring, binary_type
-from .util import is_fp_closed
+from .connection import HTTPException, BaseSSLError
+from .util.response import is_fp_closed
-log = logging.getLogger(__name__)
-
class DeflateDecoder(object):
@@ -55,7 +48,10 @@ class HTTPResponse(io.IOBase):
HTTP Response container.
Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
- loaded and decoded on-demand when the ``data`` property is accessed.
+ loaded and decoded on-demand when the ``data`` property is accessed. This
+ class is also compatible with the Python standard library's :mod:`io`
+ module, and can hence be treated as a readable object in the context of that
+ framework.
Extra parameters for behaviour not present in httplib.HTTPResponse:
@@ -79,7 +75,10 @@ class HTTPResponse(io.IOBase):
def __init__(self, body='', headers=None, status=0, version=0, reason=None,
strict=0, preload_content=True, decode_content=True,
original_response=None, pool=None, connection=None):
- self.headers = headers or {}
+
+ self.headers = HTTPHeaderDict()
+ if headers:
+ self.headers.update(headers)
self.status = status
self.version = version
self.reason = reason
@@ -87,11 +86,14 @@ class HTTPResponse(io.IOBase):
self.decode_content = decode_content
self._decoder = None
- self._body = body if body and isinstance(body, basestring) else None
+ self._body = None
self._fp = None
self._original_response = original_response
self._fp_bytes_read = 0
+ if body and isinstance(body, (basestring, binary_type)):
+ self._body = body
+
self._pool = pool
self._connection = connection
@@ -159,8 +161,8 @@ class HTTPResponse(io.IOBase):
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
"""
- # Note: content-encoding value should be case-insensitive, per RFC 2616
- # Section 3.5
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
+ # Section 3.2
content_encoding = self.headers.get('content-encoding', '').lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
@@ -174,23 +176,42 @@ class HTTPResponse(io.IOBase):
flush_decoder = False
try:
- if amt is None:
- # cStringIO doesn't like amt=None
- data = self._fp.read()
- flush_decoder = True
- else:
- cache_content = False
- data = self._fp.read(amt)
- if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
- # Close the connection when no data is returned
- #
- # This is redundant to what httplib/http.client _should_
- # already do. However, versions of python released before
- # December 15, 2012 (http://bugs.python.org/issue16298) do not
- # properly close the connection in all cases. There is no harm
- # in redundantly calling close.
- self._fp.close()
+ try:
+ if amt is None:
+ # cStringIO doesn't like amt=None
+ data = self._fp.read()
flush_decoder = True
+ else:
+ cache_content = False
+ data = self._fp.read(amt)
+ if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
+ # Close the connection when no data is returned
+ #
+ # This is redundant to what httplib/http.client _should_
+ # already do. However, versions of python released before
+ # December 15, 2012 (http://bugs.python.org/issue16298) do
+ # not properly close the connection in all cases. There is
+ # no harm in redundantly calling close.
+ self._fp.close()
+ flush_decoder = True
+
+ except SocketTimeout:
+ # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+ # there is yet no clean way to get at it from this context.
+ raise ReadTimeoutError(self._pool, None, 'Read timed out.')
+
+ except BaseSSLError as e:
+ # FIXME: Is there a better way to differentiate between SSLErrors?
+ if 'read operation timed out' not in str(e): # Defensive:
+ # This shouldn't happen but just in case we're missing an edge
+ # case, let's avoid swallowing SSL errors.
+ raise
+
+ raise ReadTimeoutError(self._pool, None, 'Read timed out.')
+
+ except HTTPException as e:
+ # This includes IncompleteRead.
+ raise ProtocolError('Connection broken: %r' % e, e)
self._fp_bytes_read += len(data)
@@ -200,8 +221,7 @@ class HTTPResponse(io.IOBase):
except (IOError, zlib.error) as e:
raise DecodeError(
"Received response with content-encoding: %s, but "
- "failed to decode it." % content_encoding,
- e)
+ "failed to decode it." % content_encoding, e)
if flush_decoder and decode_content and self._decoder:
buf = self._decoder.decompress(binary_type())
@@ -238,7 +258,6 @@ class HTTPResponse(io.IOBase):
if data:
yield data
-
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
"""
@@ -249,17 +268,9 @@ class HTTPResponse(io.IOBase):
with ``original_response=r``.
"""
- # Normalize headers between different versions of Python
- headers = {}
+ headers = HTTPHeaderDict()
for k, v in r.getheaders():
- # Python 3: Header keys are returned capitalised
- k = k.lower()
-
- has_value = headers.get(k)
- if has_value: # Python 3: Repeating header keys are unmerged.
- v = ', '.join([has_value, v])
-
- headers[k] = v
+ headers.add(k, v)
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, 'strict', 0)
@@ -301,7 +312,7 @@ class HTTPResponse(io.IOBase):
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
- raise IOError("The file-like object this HTTPResponse is wrapped "
+ raise IOError("The file-like object this HTTPResponse is wrapped "
"around has no file descriptor")
def flush(self):
@@ -309,4 +320,14 @@ class HTTPResponse(io.IOBase):
return self._fp.flush()
def readable(self):
+ # This method is required for `io` module compatibility.
return True
+
+ def readinto(self, b):
+ # This method is required for `io` module compatibility.
+ temp = self.read(len(b))
+ if len(temp) == 0:
+ return 0
+ else:
+ b[:len(temp)] = temp
+ return len(temp)
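
With ``readable()`` and ``readinto()`` in place, an ``HTTPResponse`` composes with the stdlib :mod:`io` layer directly. A minimal sketch, constructing the response by hand rather than from a live socket (the header and body values are made up)::

    import io
    from urllib3.response import HTTPResponse

    resp = HTTPResponse(body=io.BytesIO(b'hello world'),
                        headers={'Content-Type': 'text/plain'},
                        status=200, preload_content=False)

    # HTTPResponse subclasses io.IOBase, so buffering layers wrap it cleanly.
    reader = io.BufferedReader(resp, buffer_size=4)
    print(reader.read(5))                    # b'hello'
    print(resp.headers.get('content-type'))  # case-insensitive via HTTPHeaderDict
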
diff --git a/lib/requests/packages/urllib3/util.py b/lib/requests/packages/urllib3/util.py
deleted file mode 100644
index bd266317..00000000
--- a/lib/requests/packages/urllib3/util.py
+++ /dev/null
@@ -1,648 +0,0 @@
-# urllib3/util.py
-# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-#
-# This module is part of urllib3 and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-from base64 import b64encode
-from binascii import hexlify, unhexlify
-from collections import namedtuple
-from hashlib import md5, sha1
-from socket import error as SocketError, _GLOBAL_DEFAULT_TIMEOUT
-import time
-
-try:
- from select import poll, POLLIN
-except ImportError: # `poll` doesn't exist on OSX and other platforms
- poll = False
- try:
- from select import select
- except ImportError: # `select` doesn't exist on AppEngine.
- select = False
-
-try: # Test for SSL features
- SSLContext = None
- HAS_SNI = False
-
- import ssl
- from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
- from ssl import SSLContext # Modern SSL?
- from ssl import HAS_SNI # Has SNI?
-except ImportError:
- pass
-
-from .packages import six
-from .exceptions import LocationParseError, SSLError, TimeoutStateError
-
-
-_Default = object()
-# The default timeout to use for socket connections. This is the attribute used
-# by httplib to define the default timeout
-
-
-def current_time():
- """
- Retrieve the current time, this function is mocked out in unit testing.
- """
- return time.time()
-
-
-class Timeout(object):
- """
- Utility object for storing timeout values.
-
- Example usage:
-
- .. code-block:: python
-
- timeout = urllib3.util.Timeout(connect=2.0, read=7.0)
- pool = HTTPConnectionPool('www.google.com', 80, timeout=timeout)
- pool.request(...) # Etc, etc
-
- :param connect:
- The maximum amount of time to wait for a connection attempt to a server
- to succeed. Omitting the parameter will default the connect timeout to
- the system default, probably `the global default timeout in socket.py
- `_.
- None will set an infinite timeout for connection attempts.
-
- :type connect: integer, float, or None
-
- :param read:
- The maximum amount of time to wait between consecutive
- read operations for a response from the server. Omitting
- the parameter will default the read timeout to the system
- default, probably `the global default timeout in socket.py
- `_.
- None will set an infinite timeout.
-
- :type read: integer, float, or None
-
- :param total:
- This combines the connect and read timeouts into one; the read timeout
- will be set to the time leftover from the connect attempt. In the
- event that both a connect timeout and a total are specified, or a read
- timeout and a total are specified, the shorter timeout will be applied.
-
- Defaults to None.
-
- :type total: integer, float, or None
-
- .. note::
-
- Many factors can affect the total amount of time for urllib3 to return
- an HTTP response. Specifically, Python's DNS resolver does not obey the
- timeout specified on the socket. Other factors that can affect total
- request time include high CPU load, high swap, the program running at a
- low priority level, or other behaviors. The observed running time for
- urllib3 to return a response may be greater than the value passed to
- `total`.
-
- In addition, the read and total timeouts only measure the time between
- read operations on the socket connecting the client and the server,
- not the total amount of time for the request to return a complete
- response. For most requests, the timeout is raised because the server
- has not sent the first byte in the specified time. This is not always
- the case; if a server streams one byte every fifteen seconds, a timeout
- of 20 seconds will not ever trigger, even though the request will
- take several minutes to complete.
-
- If your goal is to cut off any request after a set amount of wall clock
- time, consider having a second "watcher" thread to cut off a slow
- request.
- """
-
- #: A sentinel object representing the default timeout value
- DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
-
- def __init__(self, total=None, connect=_Default, read=_Default):
- self._connect = self._validate_timeout(connect, 'connect')
- self._read = self._validate_timeout(read, 'read')
- self.total = self._validate_timeout(total, 'total')
- self._start_connect = None
-
- def __str__(self):
- return '%s(connect=%r, read=%r, total=%r)' % (
- type(self).__name__, self._connect, self._read, self.total)
-
-
- @classmethod
- def _validate_timeout(cls, value, name):
- """ Check that a timeout attribute is valid
-
- :param value: The timeout value to validate
- :param name: The name of the timeout attribute to validate. This is used
- for clear error messages
- :return: the value
- :raises ValueError: if the type is not an integer or a float, or if it
- is a numeric value less than zero
- """
- if value is _Default:
- return cls.DEFAULT_TIMEOUT
-
- if value is None or value is cls.DEFAULT_TIMEOUT:
- return value
-
- try:
- float(value)
- except (TypeError, ValueError):
- raise ValueError("Timeout value %s was %s, but it must be an "
- "int or float." % (name, value))
-
- try:
- if value < 0:
- raise ValueError("Attempted to set %s timeout to %s, but the "
- "timeout cannot be set to a value less "
- "than 0." % (name, value))
- except TypeError: # Python 3
- raise ValueError("Timeout value %s was %s, but it must be an "
- "int or float." % (name, value))
-
- return value
-
- @classmethod
- def from_float(cls, timeout):
- """ Create a new Timeout from a legacy timeout value.
-
- The timeout value used by httplib.py sets the same timeout on the
- connect(), and recv() socket requests. This creates a :class:`Timeout`
- object that sets the individual timeouts to the ``timeout`` value passed
- to this function.
-
- :param timeout: The legacy timeout value
- :type timeout: integer, float, sentinel default object, or None
- :return: a Timeout object
- :rtype: :class:`Timeout`
- """
- return Timeout(read=timeout, connect=timeout)
-
- def clone(self):
- """ Create a copy of the timeout object
-
- Timeout properties are stored per-pool but each request needs a fresh
- Timeout object to ensure each one has its own start/stop configured.
-
- :return: a copy of the timeout object
- :rtype: :class:`Timeout`
- """
- # We can't use copy.deepcopy because that will also create a new object
- # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
- # detect the user default.
- return Timeout(connect=self._connect, read=self._read,
- total=self.total)
-
- def start_connect(self):
- """ Start the timeout clock, used during a connect() attempt
-
- :raises urllib3.exceptions.TimeoutStateError: if you attempt
- to start a timer that has been started already.
- """
- if self._start_connect is not None:
- raise TimeoutStateError("Timeout timer has already been started.")
- self._start_connect = current_time()
- return self._start_connect
-
- def get_connect_duration(self):
- """ Gets the time elapsed since the call to :meth:`start_connect`.
-
- :return: the elapsed time
- :rtype: float
- :raises urllib3.exceptions.TimeoutStateError: if you attempt
- to get duration for a timer that hasn't been started.
- """
- if self._start_connect is None:
- raise TimeoutStateError("Can't get connect duration for timer "
- "that has not started.")
- return current_time() - self._start_connect
-
- @property
- def connect_timeout(self):
- """ Get the value to use when setting a connection timeout.
-
- This will be a positive float or integer, the value None
- (never timeout), or the default system timeout.
-
- :return: the connect timeout
- :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
- """
- if self.total is None:
- return self._connect
-
- if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
- return self.total
-
- return min(self._connect, self.total)
-
- @property
- def read_timeout(self):
- """ Get the value for the read timeout.
-
- This assumes some time has elapsed in the connection timeout and
- computes the read timeout appropriately.
-
- If self.total is set, the read timeout is dependent on the amount of
- time taken by the connect timeout. If the connection time has not been
- established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
- raised.
-
- :return: the value to use for the read timeout
- :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
- :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
- has not yet been called on this object.
- """
- if (self.total is not None and
- self.total is not self.DEFAULT_TIMEOUT and
- self._read is not None and
- self._read is not self.DEFAULT_TIMEOUT):
- # in case the connect timeout has not yet been established.
- if self._start_connect is None:
- return self._read
- return max(0, min(self.total - self.get_connect_duration(),
- self._read))
- elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
- return max(0, self.total - self.get_connect_duration())
- else:
- return self._read
-
-
-class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])):
- """
- Datastructure for representing an HTTP URL. Used as a return value for
- :func:`parse_url`.
- """
- slots = ()
-
- def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None):
- return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment)
-
- @property
- def hostname(self):
- """For backwards-compatibility with urlparse. We're nice like that."""
- return self.host
-
- @property
- def request_uri(self):
- """Absolute path including the query string."""
- uri = self.path or '/'
-
- if self.query is not None:
- uri += '?' + self.query
-
- return uri
-
- @property
- def netloc(self):
- """Network location including host and port"""
- if self.port:
- return '%s:%d' % (self.host, self.port)
- return self.host
-
-
-def split_first(s, delims):
- """
- Given a string and an iterable of delimiters, split on the first found
- delimiter. Return two split parts and the matched delimiter.
-
- If not found, then the first part is the full input string.
-
- Example: ::
-
- >>> split_first('foo/bar?baz', '?/=')
- ('foo', 'bar?baz', '/')
- >>> split_first('foo/bar?baz', '123')
- ('foo/bar?baz', '', None)
-
- Scales linearly with number of delims. Not ideal for large number of delims.
- """
- min_idx = None
- min_delim = None
- for d in delims:
- idx = s.find(d)
- if idx < 0:
- continue
-
- if min_idx is None or idx < min_idx:
- min_idx = idx
- min_delim = d
-
- if min_idx is None or min_idx < 0:
- return s, '', None
-
- return s[:min_idx], s[min_idx+1:], min_delim
-
-
-def parse_url(url):
- """
- Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
- performed to parse incomplete urls. Fields not provided will be None.
-
- Partly backwards-compatible with :mod:`urlparse`.
-
- Example: ::
-
- >>> parse_url('http://google.com/mail/')
- Url(scheme='http', host='google.com', port=None, path='/', ...)
- >>> parse_url('google.com:80')
- Url(scheme=None, host='google.com', port=80, path=None, ...)
- >>> parse_url('/foo?bar')
- Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
- """
-
- # While this code has overlap with stdlib's urlparse, it is much
- # simplified for our needs and less annoying.
- # Additionally, this implementations does silly things to be optimal
- # on CPython.
-
- scheme = None
- auth = None
- host = None
- port = None
- path = None
- fragment = None
- query = None
-
- # Scheme
- if '://' in url:
- scheme, url = url.split('://', 1)
-
- # Find the earliest Authority Terminator
- # (http://tools.ietf.org/html/rfc3986#section-3.2)
- url, path_, delim = split_first(url, ['/', '?', '#'])
-
- if delim:
- # Reassemble the path
- path = delim + path_
-
- # Auth
- if '@' in url:
- # Last '@' denotes end of auth part
- auth, url = url.rsplit('@', 1)
-
- # IPv6
- if url and url[0] == '[':
- host, url = url.split(']', 1)
- host += ']'
-
- # Port
- if ':' in url:
- _host, port = url.split(':', 1)
-
- if not host:
- host = _host
-
- if port:
- # If given, ports must be integers.
- if not port.isdigit():
- raise LocationParseError("Failed to parse: %s" % url)
- port = int(port)
- else:
- # Blank ports are cool, too. (rfc3986#section-3.2.3)
- port = None
-
- elif not host and url:
- host = url
-
- if not path:
- return Url(scheme, auth, host, port, path, query, fragment)
-
- # Fragment
- if '#' in path:
- path, fragment = path.split('#', 1)
-
- # Query
- if '?' in path:
- path, query = path.split('?', 1)
-
- return Url(scheme, auth, host, port, path, query, fragment)
-
-
-def get_host(url):
- """
- Deprecated. Use :func:`.parse_url` instead.
- """
- p = parse_url(url)
- return p.scheme or 'http', p.hostname, p.port
-
-
-def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
- basic_auth=None, proxy_basic_auth=None):
- """
- Shortcuts for generating request headers.
-
- :param keep_alive:
- If ``True``, adds 'connection: keep-alive' header.
-
- :param accept_encoding:
- Can be a boolean, list, or string.
- ``True`` translates to 'gzip,deflate'.
- List will get joined by comma.
- String will be used as provided.
-
- :param user_agent:
- String representing the user-agent you want, such as
- "python-urllib3/0.6"
-
- :param basic_auth:
- Colon-separated username:password string for 'authorization: basic ...'
- auth header.
-
- :param proxy_basic_auth:
- Colon-separated username:password string for 'proxy-authorization: basic ...'
- auth header.
-
- Example: ::
-
- >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
- {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
- >>> make_headers(accept_encoding=True)
- {'accept-encoding': 'gzip,deflate'}
- """
- headers = {}
- if accept_encoding:
- if isinstance(accept_encoding, str):
- pass
- elif isinstance(accept_encoding, list):
- accept_encoding = ','.join(accept_encoding)
- else:
- accept_encoding = 'gzip,deflate'
- headers['accept-encoding'] = accept_encoding
-
- if user_agent:
- headers['user-agent'] = user_agent
-
- if keep_alive:
- headers['connection'] = 'keep-alive'
-
- if basic_auth:
- headers['authorization'] = 'Basic ' + \
- b64encode(six.b(basic_auth)).decode('utf-8')
-
- if proxy_basic_auth:
- headers['proxy-authorization'] = 'Basic ' + \
- b64encode(six.b(proxy_basic_auth)).decode('utf-8')
-
- return headers
-
-
-def is_connection_dropped(conn): # Platform-specific
- """
- Returns True if the connection is dropped and should be closed.
-
- :param conn:
- :class:`httplib.HTTPConnection` object.
-
- Note: For platforms like AppEngine, this will always return ``False`` to
- let the platform handle connection recycling transparently for us.
- """
- sock = getattr(conn, 'sock', False)
- if not sock: # Platform-specific: AppEngine
- return False
-
- if not poll:
- if not select: # Platform-specific: AppEngine
- return False
-
- try:
- return select([sock], [], [], 0.0)[0]
- except SocketError:
- return True
-
- # This version is better on platforms that support it.
- p = poll()
- p.register(sock, POLLIN)
- for (fno, ev) in p.poll(0.0):
- if fno == sock.fileno():
- # Either data is buffered (bad), or the connection is dropped.
- return True
-
-
-def resolve_cert_reqs(candidate):
- """
- Resolves the argument to a numeric constant, which can be passed to
- the wrap_socket function/method from the ssl module.
- Defaults to :data:`ssl.CERT_NONE`.
- If given a string it is assumed to be the name of the constant in the
- :mod:`ssl` module or its abbrevation.
- (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.
- If it's neither `None` nor a string we assume it is already the numeric
- constant which can directly be passed to wrap_socket.
- """
- if candidate is None:
- return CERT_NONE
-
- if isinstance(candidate, str):
- res = getattr(ssl, candidate, None)
- if res is None:
- res = getattr(ssl, 'CERT_' + candidate)
- return res
-
- return candidate
-
-
-def resolve_ssl_version(candidate):
- """
- like resolve_cert_reqs
- """
- if candidate is None:
- return PROTOCOL_SSLv23
-
- if isinstance(candidate, str):
- res = getattr(ssl, candidate, None)
- if res is None:
- res = getattr(ssl, 'PROTOCOL_' + candidate)
- return res
-
- return candidate
-
-
-def assert_fingerprint(cert, fingerprint):
- """
- Checks if given fingerprint matches the supplied certificate.
-
- :param cert:
- Certificate as bytes object.
- :param fingerprint:
- Fingerprint as string of hexdigits, can be interspersed by colons.
- """
-
- # Maps the length of a digest to a possible hash function producing
- # this digest.
- hashfunc_map = {
- 16: md5,
- 20: sha1
- }
-
- fingerprint = fingerprint.replace(':', '').lower()
-
- digest_length, rest = divmod(len(fingerprint), 2)
-
- if rest or digest_length not in hashfunc_map:
- raise SSLError('Fingerprint is of invalid length.')
-
- # We need encode() here for py32; works on py2 and p33.
- fingerprint_bytes = unhexlify(fingerprint.encode())
-
- hashfunc = hashfunc_map[digest_length]
-
- cert_digest = hashfunc(cert).digest()
-
- if not cert_digest == fingerprint_bytes:
- raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
- .format(hexlify(fingerprint_bytes),
- hexlify(cert_digest)))
-
-def is_fp_closed(obj):
- """
- Checks whether a given file-like object is closed.
-
- :param obj:
- The file-like object to check.
- """
- if hasattr(obj, 'fp'):
- # Object is a container for another file-like object that gets released
- # on exhaustion (e.g. HTTPResponse)
- return obj.fp is None
-
- return obj.closed
-
-
-if SSLContext is not None: # Python 3.2+
- def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
- ca_certs=None, server_hostname=None,
- ssl_version=None):
- """
- All arguments except `server_hostname` have the same meaning as for
- :func:`ssl.wrap_socket`
-
- :param server_hostname:
- Hostname of the expected certificate
- """
- context = SSLContext(ssl_version)
- context.verify_mode = cert_reqs
-
- # Disable TLS compression to migitate CRIME attack (issue #309)
- OP_NO_COMPRESSION = 0x20000
- context.options |= OP_NO_COMPRESSION
-
- if ca_certs:
- try:
- context.load_verify_locations(ca_certs)
- # Py32 raises IOError
- # Py33 raises FileNotFoundError
- except Exception as e: # Reraise as SSLError
- raise SSLError(e)
- if certfile:
- # FIXME: This block needs a test.
- context.load_cert_chain(certfile, keyfile)
- if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
- return context.wrap_socket(sock, server_hostname=server_hostname)
- return context.wrap_socket(sock)
-
-else: # Python 3.1 and earlier
- def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
- ca_certs=None, server_hostname=None,
- ssl_version=None):
- return wrap_socket(sock, keyfile=keyfile, certfile=certfile,
- ca_certs=ca_certs, cert_reqs=cert_reqs,
- ssl_version=ssl_version)
diff --git a/lib/requests/packages/urllib3/util/__init__.py b/lib/requests/packages/urllib3/util/__init__.py
new file mode 100644
index 00000000..8becc814
--- /dev/null
+++ b/lib/requests/packages/urllib3/util/__init__.py
@@ -0,0 +1,24 @@
+# For backwards compatibility, provide imports that used to be here.
+from .connection import is_connection_dropped
+from .request import make_headers
+from .response import is_fp_closed
+from .ssl_ import (
+ SSLContext,
+ HAS_SNI,
+ assert_fingerprint,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
+from .timeout import (
+ current_time,
+ Timeout,
+)
+
+from .retry import Retry
+from .url import (
+ get_host,
+ parse_url,
+ split_first,
+ Url,
+)
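
The shim above keeps the old flat ``urllib3.util`` import surface intact, so callers written against the former single-module layout need no changes. A quick sketch (assuming the vendored package is importable as ``urllib3``)::

    from urllib3.util import Timeout, parse_url
    from urllib3.util.timeout import Timeout as TimeoutFromSubmodule

    # Both import paths resolve to the same class object.
    assert Timeout is TimeoutFromSubmodule
    print(parse_url('http://example.com:8080/path?q=1'))
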
diff --git a/lib/requests/packages/urllib3/util/connection.py b/lib/requests/packages/urllib3/util/connection.py
new file mode 100644
index 00000000..2156993a
--- /dev/null
+++ b/lib/requests/packages/urllib3/util/connection.py
@@ -0,0 +1,97 @@
+import socket
+try:
+ from select import poll, POLLIN
+except ImportError: # `poll` doesn't exist on OSX and other platforms
+ poll = False
+ try:
+ from select import select
+ except ImportError: # `select` doesn't exist on AppEngine.
+ select = False
+
+
+def is_connection_dropped(conn): # Platform-specific
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ :class:`httplib.HTTPConnection` object.
+
+ Note: For platforms like AppEngine, this will always return ``False`` to
+ let the platform handle connection recycling transparently for us.
+ """
+ sock = getattr(conn, 'sock', False)
+ if sock is False: # Platform-specific: AppEngine
+ return False
+ if sock is None: # Connection already closed (such as by httplib).
+ return True
+
+ if not poll:
+ if not select: # Platform-specific: AppEngine
+ return False
+
+ try:
+ return select([sock], [], [], 0.0)[0]
+ except socket.error:
+ return True
+
+ # This version is better on platforms that support it.
+ p = poll()
+ p.register(sock, POLLIN)
+ for (fno, ev) in p.poll(0.0):
+ if fno == sock.fileno():
+ # Either data is buffered (bad), or the connection is dropped.
+ return True
+
+
+# This function is copied from socket.py in the Python 2.7 standard
+# library test suite. Added to its signature is only `socket_options`.
+def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None, socket_options=None):
+ """Connect to *address* and return the socket object.
+
+ Convenience function. Connect to *address* (a 2-tuple ``(host,
+ port)``) and return the socket object. Passing the optional
+ *timeout* parameter will set the timeout on the socket instance
+ before attempting to connect. If no *timeout* is supplied, the
+ global default timeout setting returned by :func:`getdefaulttimeout`
+ is used. If *source_address* is set it must be a tuple of (host, port)
+ for the socket to bind as a source address before making the connection.
+ A host of '' or port 0 tells the OS to use the default.
+ """
+
+ host, port = address
+ err = None
+ for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ sock = None
+ try:
+ sock = socket.socket(af, socktype, proto)
+
+ # If provided, set socket level options before connecting.
+ # This is the only addition urllib3 makes to this function.
+ _set_socket_options(sock, socket_options)
+
+ if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+ sock.settimeout(timeout)
+ if source_address:
+ sock.bind(source_address)
+ sock.connect(sa)
+ return sock
+
+ except socket.error as _:
+ err = _
+ if sock is not None:
+ sock.close()
+
+ if err is not None:
+ raise err
+ else:
+ raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+ if options is None:
+ return
+
+ for opt in options:
+ sock.setsockopt(*opt)
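
The only addition to the stdlib recipe is ``socket_options``: a list of ``setsockopt`` argument tuples applied to the socket before ``connect()``. For example, to disable Nagle's algorithm (example.com is illustrative)::

    import socket
    from urllib3.util.connection import create_connection

    sock = create_connection(
        ('example.com', 80), timeout=5.0,
        socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)])
    try:
        print(sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY))  # non-zero
    finally:
        sock.close()
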
diff --git a/lib/requests/packages/urllib3/util/request.py b/lib/requests/packages/urllib3/util/request.py
new file mode 100644
index 00000000..bc64f6b1
--- /dev/null
+++ b/lib/requests/packages/urllib3/util/request.py
@@ -0,0 +1,71 @@
+from base64 import b64encode
+
+from ..packages.six import b
+
+ACCEPT_ENCODING = 'gzip,deflate'
+
+
+def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
+ basic_auth=None, proxy_basic_auth=None, disable_cache=None):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ :param proxy_basic_auth:
+ Colon-separated username:password string for 'proxy-authorization: basic ...'
+ auth header.
+
+ :param disable_cache:
+ If ``True``, adds 'cache-control: no-cache' header.
+
+ Example::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ','.join(accept_encoding)
+ else:
+ accept_encoding = ACCEPT_ENCODING
+ headers['accept-encoding'] = accept_encoding
+
+ if user_agent:
+ headers['user-agent'] = user_agent
+
+ if keep_alive:
+ headers['connection'] = 'keep-alive'
+
+ if basic_auth:
+ headers['authorization'] = 'Basic ' + \
+ b64encode(b(basic_auth)).decode('utf-8')
+
+ if proxy_basic_auth:
+ headers['proxy-authorization'] = 'Basic ' + \
+ b64encode(b(proxy_basic_auth)).decode('utf-8')
+
+ if disable_cache:
+ headers['cache-control'] = 'no-cache'
+
+ return headers
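
Beyond the docstring examples, the newly added ``disable_cache`` flag composes with the other shortcuts; a quick sketch::

    from urllib3.util.request import make_headers

    headers = make_headers(basic_auth='user:passwd', disable_cache=True)
    print(headers['cache-control'])  # no-cache
    print(headers['authorization'])  # Basic dXNlcjpwYXNzd2Q=
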
diff --git a/lib/requests/packages/urllib3/util/response.py b/lib/requests/packages/urllib3/util/response.py
new file mode 100644
index 00000000..45fff552
--- /dev/null
+++ b/lib/requests/packages/urllib3/util/response.py
@@ -0,0 +1,22 @@
+def is_fp_closed(obj):
+ """
+ Checks whether a given file-like object is closed.
+
+ :param obj:
+ The file-like object to check.
+ """
+
+ try:
+ # Check via the official file-like-object way.
+ return obj.closed
+ except AttributeError:
+ pass
+
+ try:
+ # Check if the object is a container for another file-like object that
+ # gets released on exhaustion (e.g. HTTPResponse).
+ return obj.fp is None
+ except AttributeError:
+ pass
+
+ raise ValueError("Unable to determine whether fp is closed.")
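
The rewritten check prefers the official ``closed`` attribute, falls back to the ``fp``-container convention, and refuses to guess otherwise. A sketch::

    import io
    from urllib3.util.response import is_fp_closed

    buf = io.BytesIO(b'data')
    assert is_fp_closed(buf) is False
    buf.close()
    assert is_fp_closed(buf) is True

    class Opaque(object):
        """Exposes neither .closed nor .fp."""

    try:
        is_fp_closed(Opaque())
    except ValueError:
        print('undeterminable, as documented')
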
diff --git a/lib/requests/packages/urllib3/util/retry.py b/lib/requests/packages/urllib3/util/retry.py
new file mode 100644
index 00000000..7e0959df
--- /dev/null
+++ b/lib/requests/packages/urllib3/util/retry.py
@@ -0,0 +1,285 @@
+import time
+import logging
+
+from ..exceptions import (
+ ConnectTimeoutError,
+ MaxRetryError,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseError,
+)
+from ..packages import six
+
+
+log = logging.getLogger(__name__)
+
+
+class Retry(object):
+ """ Retry configuration.
+
+ Each retry attempt will create a new Retry object with updated values, so
+ they can be safely reused.
+
+ Retries can be defined as a default for a pool::
+
+ retries = Retry(connect=5, read=2, redirect=5)
+ http = PoolManager(retries=retries)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', retries=Retry(10))
+
+ Retries can be disabled by passing ``False``::
+
+ response = http.request('GET', 'http://example.com/', retries=False)
+
+ Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+ retries are disabled, in which case the causing exception will be raised.
+
+ :param int total:
+ Total number of retries to allow. Takes precedence over other counts.
+
+ Set to ``None`` to remove this constraint and fall back on other
+ counts. It's a good idea to set this to some sensibly-high value to
+ account for unexpected edge cases and avoid infinite retry loops.
+
+ Set to ``0`` to fail on the first retry.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int connect:
+ How many connection-related errors to retry on.
+
+ These are errors raised before the request is sent to the remote server,
+ which we assume has not triggered the server to process the request.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int read:
+ How many times to retry on read errors.
+
+ These errors are raised after the request was sent to the server, so the
+ request may have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int redirect:
+ How many redirects to perform. Limit this to avoid infinite redirect
+ loops.
+
+ A redirect is an HTTP response with a status code 301, 302, 303, 307 or
+ 308.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param iterable method_whitelist:
+ Set of uppercased HTTP method verbs that we should retry on.
+
+ By default, we only retry on methods which are considered to be
+ idempotent (multiple requests with the same parameters end with the
+ same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+
+ :param iterable status_forcelist:
+ A set of HTTP status codes that we should force a retry on.
+
+ By default, this is disabled with ``None``.
+
+ :param float backoff_factor:
+ A backoff factor to apply between attempts. urllib3 will sleep for::
+
+ {backoff factor} * (2 ^ ({number of total retries} - 1))
+
+ seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
+ for [0.1s, 0.2s, 0.4s, ...] between retries. It will never be longer
+ than :attr:`Retry.MAX_BACKOFF`.
+
+ By default, backoff is disabled (set to 0).
+
+ :param bool raise_on_redirect: Whether, if the number of redirects is
+ exhausted, to raise a MaxRetryError, or to return a response with a
+ response code in the 3xx range.
+ """
+
+ DEFAULT_METHOD_WHITELIST = frozenset([
+ 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE'])
+
+ #: Maximum backoff time.
+ BACKOFF_MAX = 120
+
+ def __init__(self, total=10, connect=None, read=None, redirect=None,
+ method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,
+ backoff_factor=0, raise_on_redirect=True, _observed_errors=0):
+
+ self.total = total
+ self.connect = connect
+ self.read = read
+
+ if redirect is False or total is False:
+ redirect = 0
+ raise_on_redirect = False
+
+ self.redirect = redirect
+ self.status_forcelist = status_forcelist or set()
+ self.method_whitelist = method_whitelist
+ self.backoff_factor = backoff_factor
+ self.raise_on_redirect = raise_on_redirect
+ self._observed_errors = _observed_errors # TODO: use .history instead?
+
+ def new(self, **kw):
+ params = dict(
+ total=self.total,
+ connect=self.connect, read=self.read, redirect=self.redirect,
+ method_whitelist=self.method_whitelist,
+ status_forcelist=self.status_forcelist,
+ backoff_factor=self.backoff_factor,
+ raise_on_redirect=self.raise_on_redirect,
+ _observed_errors=self._observed_errors,
+ )
+ params.update(kw)
+ return type(self)(**params)
+
+ @classmethod
+ def from_int(cls, retries, redirect=True, default=None):
+ """ Backwards-compatibility for the old retries format."""
+ if retries is None:
+ retries = default if default is not None else cls.DEFAULT
+
+ if isinstance(retries, Retry):
+ return retries
+
+ redirect = bool(redirect) and None
+ new_retries = cls(retries, redirect=redirect)
+ log.debug("Converted retries value: %r -> %r" % (retries, new_retries))
+ return new_retries
+
+ def get_backoff_time(self):
+ """ Formula for computing the current backoff
+
+ :rtype: float
+ """
+ if self._observed_errors <= 1:
+ return 0
+
+ backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
+ return min(self.BACKOFF_MAX, backoff_value)
+
+ def sleep(self):
+ """ Sleep between retry attempts using an exponential backoff.
+
+ By default, the backoff factor is 0 and this method will return
+ immediately.
+ """
+ backoff = self.get_backoff_time()
+ if backoff <= 0:
+ return
+ time.sleep(backoff)
+
+ def _is_connection_error(self, err):
+ """ Errors when we're fairly sure that the server did not receive the
+ request, so it should be safe to retry.
+ """
+ return isinstance(err, ConnectTimeoutError)
+
+ def _is_read_error(self, err):
+ """ Errors that occur after the request has been started, so we should
+ assume that the server began processing it.
+ """
+ return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+ def is_forced_retry(self, method, status_code):
+ """ Is this method/status code retryable? (Based on method/codes whitelists)
+ """
+ if self.method_whitelist and method.upper() not in self.method_whitelist:
+ return False
+
+ return self.status_forcelist and status_code in self.status_forcelist
+
+ def is_exhausted(self):
+ """ Are we out of retries? """
+ retry_counts = (self.total, self.connect, self.read, self.redirect)
+ retry_counts = list(filter(None, retry_counts))
+ if not retry_counts:
+ return False
+
+ return min(retry_counts) < 0
+
+ def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None):
+ """ Return a new Retry object with incremented retry counters.
+
+ :param response: A response object, or None, if the server did not
+ return a response.
+ :type response: :class:`~urllib3.response.HTTPResponse`
+ :param Exception error: An error encountered during the request, or
+ None if the response was received successfully.
+
+ :return: A new ``Retry`` object.
+ """
+ if self.total is False and error:
+ # Disabled, indicate to re-raise the error.
+ raise six.reraise(type(error), error, _stacktrace)
+
+ total = self.total
+ if total is not None:
+ total -= 1
+
+ _observed_errors = self._observed_errors
+ connect = self.connect
+ read = self.read
+ redirect = self.redirect
+ cause = 'unknown'
+
+ if error and self._is_connection_error(error):
+ # Connect retry?
+ if connect is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif connect is not None:
+ connect -= 1
+ _observed_errors += 1
+
+ elif error and self._is_read_error(error):
+ # Read retry?
+ if read is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif read is not None:
+ read -= 1
+ _observed_errors += 1
+
+ elif response and response.get_redirect_location():
+ # Redirect retry?
+ if redirect is not None:
+ redirect -= 1
+ cause = 'too many redirects'
+
+ else:
+ # Incrementing because of a server error like a 500 in
+ # status_forcelist and the given method is in the whitelist
+ _observed_errors += 1
+ cause = ResponseError.GENERIC_ERROR
+ if response and response.status:
+ cause = ResponseError.SPECIFIC_ERROR.format(
+ status_code=response.status)
+
+ new_retry = self.new(
+ total=total,
+ connect=connect, read=read, redirect=redirect,
+ _observed_errors=_observed_errors)
+
+ if new_retry.is_exhausted():
+ raise MaxRetryError(_pool, url, error or ResponseError(cause))
+
+ log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry))
+
+ return new_retry
+
+
+ def __repr__(self):
+ return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
+ 'read={self.read}, redirect={self.redirect})').format(
+ cls=type(self), self=self)
+
+
+# For backwards compatibility (equivalent to pre-v1.9):
+Retry.DEFAULT = Retry(3)
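
To make the backoff formula concrete: with ``backoff_factor=0.5`` the computed sleeps after successive errors are 0, 1.0 and 2.0 seconds, because a single observed error short-circuits to 0. A sketch driving ``increment`` by hand with a hypothetical URL (the pool argument is left as ``None``)::

    from urllib3.exceptions import ReadTimeoutError
    from urllib3.util.retry import Retry

    retry = Retry(total=3, read=3, backoff_factor=0.5)
    err = ReadTimeoutError(None, '/hypothetical', 'Read timed out.')

    for expected in (0, 1.0, 2.0):
        retry = retry.increment(method='GET', url='/hypothetical', error=err)
        assert retry.get_backoff_time() == expected
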
diff --git a/lib/requests/packages/urllib3/util/ssl_.py b/lib/requests/packages/urllib3/util/ssl_.py
new file mode 100644
index 00000000..a788b1b9
--- /dev/null
+++ b/lib/requests/packages/urllib3/util/ssl_.py
@@ -0,0 +1,254 @@
+from binascii import hexlify, unhexlify
+from hashlib import md5, sha1
+
+from ..exceptions import SSLError
+
+
+SSLContext = None
+HAS_SNI = False
+create_default_context = None
+
+import errno
+import ssl
+
+try: # Test for SSL features
+ from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
+ from ssl import HAS_SNI # Has SNI?
+except ImportError:
+ pass
+
+
+try:
+ from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
+except ImportError:
+ OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+ OP_NO_COMPRESSION = 0x20000
+
+try:
+ from ssl import _DEFAULT_CIPHERS
+except ImportError:
+ _DEFAULT_CIPHERS = (
+ 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
+ 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:'
+ 'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5'
+ )
+
+try:
+ from ssl import SSLContext # Modern SSL?
+except ImportError:
+ import sys
+
+ class SSLContext(object): # Platform-specific: Python 2 & 3.1
+ supports_set_ciphers = sys.version_info >= (2, 7)
+
+ def __init__(self, protocol_version):
+ self.protocol = protocol_version
+ # Use default values from a real SSLContext
+ self.check_hostname = False
+ self.verify_mode = ssl.CERT_NONE
+ self.ca_certs = None
+ self.options = 0
+ self.certfile = None
+ self.keyfile = None
+ self.ciphers = None
+
+ def load_cert_chain(self, certfile, keyfile):
+ self.certfile = certfile
+ self.keyfile = keyfile
+
+ def load_verify_locations(self, location):
+ self.ca_certs = location
+
+ def set_ciphers(self, cipher_suite):
+ if not self.supports_set_ciphers:
+ raise TypeError(
+ 'Your version of Python does not support setting '
+ 'a custom cipher suite. Please upgrade to Python '
+ '2.7, 3.2, or later if you need this functionality.'
+ )
+ self.ciphers = cipher_suite
+
+ def wrap_socket(self, socket, server_hostname=None):
+ kwargs = {
+ 'keyfile': self.keyfile,
+ 'certfile': self.certfile,
+ 'ca_certs': self.ca_certs,
+ 'cert_reqs': self.verify_mode,
+ 'ssl_version': self.protocol,
+ }
+ if self.supports_set_ciphers: # Platform-specific: Python 2.7+
+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+ else: # Platform-specific: Python 2.6
+ return wrap_socket(socket, **kwargs)
+
+
+def assert_fingerprint(cert, fingerprint):
+ """
+ Checks if given fingerprint matches the supplied certificate.
+
+ :param cert:
+ Certificate as bytes object.
+ :param fingerprint:
+ Fingerprint as string of hexdigits, can be interspersed by colons.
+ """
+
+ # Maps the length of a digest to a possible hash function producing
+ # this digest.
+ hashfunc_map = {
+ 16: md5,
+ 20: sha1
+ }
+
+ fingerprint = fingerprint.replace(':', '').lower()
+ digest_length, odd = divmod(len(fingerprint), 2)
+
+ if odd or digest_length not in hashfunc_map:
+ raise SSLError('Fingerprint is of invalid length.')
+
+ # We need encode() here for py32; works on py2 and py33.
+ fingerprint_bytes = unhexlify(fingerprint.encode())
+
+ hashfunc = hashfunc_map[digest_length]
+
+ cert_digest = hashfunc(cert).digest()
+
+ if not cert_digest == fingerprint_bytes:
+ raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
+ .format(hexlify(fingerprint_bytes),
+ hexlify(cert_digest)))
+
+
+def resolve_cert_reqs(candidate):
+ """
+ Resolves the argument to a numeric constant, which can be passed to
+ the wrap_socket function/method from the ssl module.
+ Defaults to :data:`ssl.CERT_NONE`.
+ If given a string it is assumed to be the name of the constant in the
+ :mod:`ssl` module or its abbreviation.
+ (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
+ If it's neither `None` nor a string we assume it is already the numeric
+ constant which can directly be passed to wrap_socket.
+ """
+ if candidate is None:
+ return CERT_NONE
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'CERT_' + candidate)
+ return res
+
+ return candidate
+
+
+def resolve_ssl_version(candidate):
+ """
+ like resolve_cert_reqs
+ """
+ if candidate is None:
+ return PROTOCOL_SSLv23
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'PROTOCOL_' + candidate)
+ return res
+
+ return candidate
+
+
+def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
+ options=None, ciphers=None):
+ """All arguments have the same meaning as ``ssl_wrap_socket``.
+
+ By default, this function does a lot of the same work that
+ ``ssl.create_default_context`` does on Python 3.4+. It:
+
+ - Disables SSLv2, SSLv3, and compression
+ - Sets a restricted set of server ciphers
+
+ If you wish to enable SSLv3, you can do::
+
+ from urllib3.util import ssl_
+ context = ssl_.create_urllib3_context()
+ context.options &= ~ssl_.OP_NO_SSLv3
+
+ You can do the same to enable compression (substituting ``COMPRESSION``
+ for ``SSLv3`` in the last line above).
+
+ :param ssl_version:
+ The desired protocol version to use. This will default to
+ PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+ the server and your installation of OpenSSL support.
+ :param cert_reqs:
+ Whether to require the certificate verification. This defaults to
+ ``ssl.CERT_REQUIRED``.
+ :param options:
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
+ :param ciphers:
+ Which cipher suites to allow the server to select.
+ :returns:
+ Constructed SSLContext object with specified options
+ :rtype: SSLContext
+ """
+ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
+
+ if options is None:
+ options = 0
+ # SSLv2 is easily broken and is considered harmful and dangerous
+ options |= OP_NO_SSLv2
+ # SSLv3 has several problems and is now dangerous
+ options |= OP_NO_SSLv3
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+ # (issue #309)
+ options |= OP_NO_COMPRESSION
+
+ context.options |= options
+
+ if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6
+ context.set_ciphers(ciphers or _DEFAULT_CIPHERS)
+
+ context.verify_mode = cert_reqs
+ if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2
+ context.check_hostname = (context.verify_mode == ssl.CERT_REQUIRED)
+ return context
+
+
+def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
+ ca_certs=None, server_hostname=None,
+ ssl_version=None, ciphers=None, ssl_context=None):
+ """
+ All arguments except for server_hostname and ssl_context have the same
+ meaning as they do when using :func:`ssl.wrap_socket`.
+
+ :param server_hostname:
+ When SNI is supported, the expected hostname of the certificate
+ :param ssl_context:
+ A pre-made :class:`SSLContext` object. If none is provided, one will
+ be created using :func:`create_urllib3_context`.
+ :param ciphers:
+ A string of ciphers we wish the client to support. This is not
+ supported on Python 2.6 as the ssl module does not support it.
+ """
+ context = ssl_context
+ if context is None:
+ context = create_urllib3_context(ssl_version, cert_reqs,
+ ciphers=ciphers)
+
+ if ca_certs:
+ try:
+ context.load_verify_locations(ca_certs)
+ except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2
+ raise SSLError(e)
+ # Py33 raises FileNotFoundError which subclasses OSError
+ # These are not equivalent unless we check the errno attribute
+ except OSError as e: # Platform-specific: Python 3.3 and beyond
+ if e.errno == errno.ENOENT:
+ raise SSLError(e)
+ raise
+ if certfile:
+ context.load_cert_chain(certfile, keyfile)
+ if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
+ return context.wrap_socket(sock, server_hostname=server_hostname)
+ return context.wrap_socket(sock)
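
A sketch of the new context factory in use, checking the hardened defaults and the opt-out documented in ``create_urllib3_context``::

    import ssl
    from urllib3.util import ssl_

    context = ssl_.create_urllib3_context()
    assert context.verify_mode == ssl.CERT_REQUIRED
    assert context.options & ssl_.OP_NO_SSLv2
    assert context.options & ssl_.OP_NO_COMPRESSION

    # Re-enabling SSLv3, as documented above (not recommended):
    context.options &= ~ssl_.OP_NO_SSLv3
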
diff --git a/lib/requests/packages/urllib3/util/timeout.py b/lib/requests/packages/urllib3/util/timeout.py
new file mode 100644
index 00000000..ea7027f3
--- /dev/null
+++ b/lib/requests/packages/urllib3/util/timeout.py
@@ -0,0 +1,240 @@
+# The default socket timeout, used by httplib to indicate that no timeout was
+# specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT
+import time
+
+from ..exceptions import TimeoutStateError
+
+# A sentinel value to indicate that no timeout was specified by the user in
+# urllib3
+_Default = object()
+
+def current_time():
+ """
+ Retrieve the current time. This function is mocked out in unit testing.
+ """
+ return time.time()
+
+
+class Timeout(object):
+ """ Timeout configuration.
+
+ Timeouts can be defined as a default for a pool::
+
+ timeout = Timeout(connect=2.0, read=7.0)
+ http = PoolManager(timeout=timeout)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+
+ Timeouts can be disabled by setting all the parameters to ``None``::
+
+ no_timeout = Timeout(connect=None, read=None)
+ response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+
+
+ :param total:
+ This combines the connect and read timeouts into one; the read timeout
+ will be set to the time leftover from the connect attempt. In the
+ event that both a connect timeout and a total are specified, or a read
+ timeout and a total are specified, the shorter timeout will be applied.
+
+ Defaults to None.
+
+ :type total: integer, float, or None
+
+ :param connect:
+ The maximum amount of time to wait for a connection attempt to a server
+ to succeed. Omitting the parameter will default the connect timeout to
+ the system default, probably `the global default timeout in socket.py
+ `_.
+ None will set an infinite timeout for connection attempts.
+
+ :type connect: integer, float, or None
+
+ :param read:
+ The maximum amount of time to wait between consecutive
+ read operations for a response from the server. Omitting
+ the parameter will default the read timeout to the system
+ default, probably `the global default timeout in socket.py
+ `_.
+ None will set an infinite timeout.
+
+ :type read: integer, float, or None
+
+ .. note::
+
+ Many factors can affect the total amount of time for urllib3 to return
+ an HTTP response.
+
+ For example, Python's DNS resolver does not obey the timeout specified
+ on the socket. Other factors that can affect total request time include
+ high CPU load, high swap, the program running at a low priority level,
+ or other behaviors.
+
+ In addition, the read and total timeouts only measure the time between
+ read operations on the socket connecting the client and the server,
+ not the total amount of time for the request to return a complete
+ response. For most requests, the timeout is raised because the server
+ has not sent the first byte in the specified time. This is not always
+ the case; if a server streams one byte every fifteen seconds, a timeout
+ of 20 seconds will not trigger, even though the request will take
+ several minutes to complete.
+
+ If your goal is to cut off any request after a set amount of wall clock
+ time, consider having a second "watcher" thread to cut off a slow
+ request.
+ """
+
+ #: A sentinel object representing the default timeout value
+ DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
+
+ def __init__(self, total=None, connect=_Default, read=_Default):
+ self._connect = self._validate_timeout(connect, 'connect')
+ self._read = self._validate_timeout(read, 'read')
+ self.total = self._validate_timeout(total, 'total')
+ self._start_connect = None
+
+ def __str__(self):
+ return '%s(connect=%r, read=%r, total=%r)' % (
+ type(self).__name__, self._connect, self._read, self.total)
+
+ @classmethod
+ def _validate_timeout(cls, value, name):
+ """ Check that a timeout attribute is valid.
+
+ :param value: The timeout value to validate
+ :param name: The name of the timeout attribute to validate. This is
+ used to specify in error messages.
+ :return: The validated and casted version of the given value.
+ :raises ValueError: If the type is not an integer or a float, or if it
+ is a numeric value less than zero.
+ """
+ if value is _Default:
+ return cls.DEFAULT_TIMEOUT
+
+ if value is None or value is cls.DEFAULT_TIMEOUT:
+ return value
+
+ try:
+ float(value)
+ except (TypeError, ValueError):
+ raise ValueError("Timeout value %s was %s, but it must be an "
+ "int or float." % (name, value))
+
+ try:
+ if value < 0:
+ raise ValueError("Attempted to set %s timeout to %s, but the "
+ "timeout cannot be set to a value less "
+ "than 0." % (name, value))
+ except TypeError: # Python 3
+ raise ValueError("Timeout value %s was %s, but it must be an "
+ "int or float." % (name, value))
+
+ return value
+
+ @classmethod
+ def from_float(cls, timeout):
+ """ Create a new Timeout from a legacy timeout value.
+
+ The timeout value used by httplib.py sets the same timeout on the
+ connect(), and recv() socket requests. This creates a :class:`Timeout`
+ object that sets the individual timeouts to the ``timeout`` value
+ passed to this function.
+
+ :param timeout: The legacy timeout value.
+ :type timeout: integer, float, sentinel default object, or None
+ :return: Timeout object
+ :rtype: :class:`Timeout`
+ """
+ return Timeout(read=timeout, connect=timeout)
+
+ def clone(self):
+ """ Create a copy of the timeout object
+
+ Timeout properties are stored per-pool but each request needs a fresh
+ Timeout object to ensure each one has its own start/stop configured.
+
+ :return: a copy of the timeout object
+ :rtype: :class:`Timeout`
+ """
+ # We can't use copy.deepcopy because that will also create a new object
+ # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
+ # detect the user default.
+ return Timeout(connect=self._connect, read=self._read,
+ total=self.total)
+
+ def start_connect(self):
+ """ Start the timeout clock, used during a connect() attempt
+
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to start a timer that has been started already.
+ """
+ if self._start_connect is not None:
+ raise TimeoutStateError("Timeout timer has already been started.")
+ self._start_connect = current_time()
+ return self._start_connect
+
+ def get_connect_duration(self):
+ """ Gets the time elapsed since the call to :meth:`start_connect`.
+
+ :return: Elapsed time.
+ :rtype: float
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to get duration for a timer that hasn't been started.
+ """
+ if self._start_connect is None:
+ raise TimeoutStateError("Can't get connect duration for timer "
+ "that has not started.")
+ return current_time() - self._start_connect
+
+ @property
+ def connect_timeout(self):
+ """ Get the value to use when setting a connection timeout.
+
+ This will be a positive float or integer, the value None
+ (never timeout), or the default system timeout.
+
+ :return: Connect timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ """
+ if self.total is None:
+ return self._connect
+
+ if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+ return self.total
+
+ return min(self._connect, self.total)
+
+ @property
+ def read_timeout(self):
+ """ Get the value for the read timeout.
+
+ This assumes some time has elapsed in the connection timeout and
+ computes the read timeout appropriately.
+
+ If self.total is set, the read timeout is dependent on the amount of
+ time taken by the connect timeout. If the connection time has not been
+ established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
+ raised.
+
+ :return: Value to use for the read timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
+ has not yet been called on this object.
+ """
+ if (self.total is not None and
+ self.total is not self.DEFAULT_TIMEOUT and
+ self._read is not None and
+ self._read is not self.DEFAULT_TIMEOUT):
+ # In case the connect timeout has not yet been established.
+ if self._start_connect is None:
+ return self._read
+ return max(0, min(self.total - self.get_connect_duration(),
+ self._read))
+ elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
+ return max(0, self.total - self.get_connect_duration())
+ else:
+ return self._read
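+
+# A minimal usage sketch (illustrative only, not part of the module):
+#
+#   timeout = Timeout(connect=2.0, read=7.0, total=8.0)
+#   per_request = timeout.clone()   # fresh copy for each request
+#   per_request.start_connect()     # start the connect clock
+#   per_request.connect_timeout     # -> 2.0 (min of connect and total)
+#   per_request.read_timeout        # -> remaining budget, capped by read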
diff --git a/lib/requests/packages/urllib3/util/url.py b/lib/requests/packages/urllib3/util/url.py
new file mode 100644
index 00000000..b2ec834f
--- /dev/null
+++ b/lib/requests/packages/urllib3/util/url.py
@@ -0,0 +1,212 @@
+from collections import namedtuple
+
+from ..exceptions import LocationParseError
+
+
+url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']
+
+
+class Url(namedtuple('Url', url_attrs)):
+ """
+ Datastructure for representing an HTTP URL. Used as a return value for
+ :func:`parse_url`.
+ """
+    __slots__ = ()
+
+ def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
+ query=None, fragment=None):
+ return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
+ query, fragment)
+
+ @property
+ def hostname(self):
+ """For backwards-compatibility with urlparse. We're nice like that."""
+ return self.host
+
+ @property
+ def request_uri(self):
+ """Absolute path including the query string."""
+ uri = self.path or '/'
+
+ if self.query is not None:
+ uri += '?' + self.query
+
+ return uri
+
+ @property
+ def netloc(self):
+ """Network location including host and port"""
+ if self.port:
+ return '%s:%d' % (self.host, self.port)
+ return self.host
+
+ @property
+ def url(self):
+ """
+ Convert self into a url
+
+ This function should more or less round-trip with :func:`.parse_url`. The
+        returned url may not be exactly the same as the url passed to
+ :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+ with a blank port will have : removed).
+
+ Example: ::
+
+ >>> U = parse_url('http://google.com/mail/')
+ >>> U.url
+ 'http://google.com/mail/'
+ >>> Url('http', 'username:password', 'host.com', 80,
+ ... '/path', 'query', 'fragment').url
+ 'http://username:password@host.com:80/path?query#fragment'
+ """
+ scheme, auth, host, port, path, query, fragment = self
+ url = ''
+
+ # We use "is not None" we want things to happen with empty strings (or 0 port)
+ if scheme is not None:
+ url += scheme + '://'
+ if auth is not None:
+ url += auth + '@'
+ if host is not None:
+ url += host
+ if port is not None:
+ url += ':' + str(port)
+ if path is not None:
+ url += path
+ if query is not None:
+ url += '?' + query
+ if fragment is not None:
+ url += '#' + fragment
+
+ return url
+
+ def __str__(self):
+ return self.url
+
+def split_first(s, delims):
+ """
+ Given a string and an iterable of delimiters, split on the first found
+ delimiter. Return two split parts and the matched delimiter.
+
+ If not found, then the first part is the full input string.
+
+ Example::
+
+ >>> split_first('foo/bar?baz', '?/=')
+ ('foo', 'bar?baz', '/')
+ >>> split_first('foo/bar?baz', '123')
+ ('foo/bar?baz', '', None)
+
+    Scales linearly with the number of delims. Not ideal for a large number of delims.
+ """
+ min_idx = None
+ min_delim = None
+ for d in delims:
+ idx = s.find(d)
+ if idx < 0:
+ continue
+
+ if min_idx is None or idx < min_idx:
+ min_idx = idx
+ min_delim = d
+
+ if min_idx is None or min_idx < 0:
+ return s, '', None
+
+ return s[:min_idx], s[min_idx+1:], min_delim
+
+
+def parse_url(url):
+ """
+ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
+ performed to parse incomplete urls. Fields not provided will be None.
+
+ Partly backwards-compatible with :mod:`urlparse`.
+
+ Example::
+
+ >>> parse_url('http://google.com/mail/')
+ Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
+ >>> parse_url('google.com:80')
+ Url(scheme=None, host='google.com', port=80, path=None, ...)
+ >>> parse_url('/foo?bar')
+ Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
+ """
+
+ # While this code has overlap with stdlib's urlparse, it is much
+ # simplified for our needs and less annoying.
+    # Additionally, this implementation does silly things to be optimal
+ # on CPython.
+
+ if not url:
+ # Empty
+ return Url()
+
+ scheme = None
+ auth = None
+ host = None
+ port = None
+ path = None
+ fragment = None
+ query = None
+
+ # Scheme
+ if '://' in url:
+ scheme, url = url.split('://', 1)
+
+ # Find the earliest Authority Terminator
+ # (http://tools.ietf.org/html/rfc3986#section-3.2)
+ url, path_, delim = split_first(url, ['/', '?', '#'])
+
+ if delim:
+ # Reassemble the path
+ path = delim + path_
+
+ # Auth
+ if '@' in url:
+ # Last '@' denotes end of auth part
+ auth, url = url.rsplit('@', 1)
+
+ # IPv6
+ if url and url[0] == '[':
+ host, url = url.split(']', 1)
+ host += ']'
+
+ # Port
+ if ':' in url:
+ _host, port = url.split(':', 1)
+
+ if not host:
+ host = _host
+
+ if port:
+ # If given, ports must be integers.
+ if not port.isdigit():
+ raise LocationParseError(url)
+ port = int(port)
+ else:
+ # Blank ports are cool, too. (rfc3986#section-3.2.3)
+ port = None
+
+ elif not host and url:
+ host = url
+
+ if not path:
+ return Url(scheme, auth, host, port, path, query, fragment)
+
+ # Fragment
+ if '#' in path:
+ path, fragment = path.split('#', 1)
+
+ # Query
+ if '?' in path:
+ path, query = path.split('?', 1)
+
+ return Url(scheme, auth, host, port, path, query, fragment)
+
+def get_host(url):
+ """
+ Deprecated. Use :func:`.parse_url` instead.
+ """
+ p = parse_url(url)
+ return p.scheme or 'http', p.hostname, p.port
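+
+# Illustrative results (hosts are examples):
+#
+#   get_host('http://google.com/mail/')      -> ('http', 'google.com', None)
+#   get_host('https://user@example.com:443') -> ('https', 'example.com', 443)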
diff --git a/lib/requests/sessions.py b/lib/requests/sessions.py
index 425db22c..4f306963 100644
--- a/lib/requests/sessions.py
+++ b/lib/requests/sessions.py
@@ -12,24 +12,32 @@ import os
from collections import Mapping
from datetime import datetime
-from .compat import cookielib, OrderedDict, urljoin, urlparse, builtin_str
+from .auth import _basic_auth_str
+from .compat import cookielib, OrderedDict, urljoin, urlparse
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from .utils import to_key_val_list, default_headers, to_native_string
-from .exceptions import TooManyRedirects, InvalidSchema
+from .exceptions import (
+ TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
+from .packages.urllib3._collections import RecentlyUsedContainer
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter
-from .utils import requote_uri, get_environ_proxies, get_netrc_auth
+from .utils import (
+ requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
+ get_auth_from_url
+)
from .status_codes import codes
# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI
+REDIRECT_CACHE_SIZE = 1000
+
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
"""
@@ -86,11 +94,21 @@ class SessionRedirectMixin(object):
"""Receives a Response. Returns a generator of Responses."""
i = 0
+ hist = [] # keep track of history
while resp.is_redirect:
prepared_request = req.copy()
- resp.content # Consume socket so it can be released
+ if i > 0:
+ # Update history and keep track of redirects.
+ hist.append(resp)
+ new_hist = list(hist)
+ resp.history = new_hist
+
+ try:
+ resp.content # Consume socket so it can be released
+ except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
+ resp.raw.read(decode_content=False)
if i >= self.max_redirects:
raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects)
@@ -110,17 +128,20 @@ class SessionRedirectMixin(object):
parsed = urlparse(url)
url = parsed.geturl()
- # Facilitate non-RFC2616-compliant 'location' headers
+ # Facilitate relative 'location' headers, as allowed by RFC 7231.
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
# Compliant with RFC3986, we percent encode the url.
- if not urlparse(url).netloc:
+ if not parsed.netloc:
url = urljoin(resp.url, requote_uri(url))
else:
url = requote_uri(url)
prepared_request.url = to_native_string(url)
+ # Cache the url, unless it redirects to itself.
+ if resp.is_permanent_redirect and req.url != prepared_request.url:
+ self.redirect_cache[req.url] = prepared_request.url
- # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
+ # http://tools.ietf.org/html/rfc7231#section-6.4.4
if (resp.status_code == codes.see_other and
method != 'HEAD'):
method = 'GET'
@@ -138,7 +159,7 @@ class SessionRedirectMixin(object):
prepared_request.method = method
# https://github.com/kennethreitz/requests/issues/1084
- if resp.status_code not in (codes.temporary, codes.resume):
+ if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
if 'Content-Length' in prepared_request.headers:
del prepared_request.headers['Content-Length']
@@ -154,22 +175,15 @@ class SessionRedirectMixin(object):
prepared_request._cookies.update(self.cookies)
prepared_request.prepare_cookies(prepared_request._cookies)
- if 'Authorization' in headers:
- # If we get redirected to a new host, we should strip out any
- # authentication headers.
- original_parsed = urlparse(resp.request.url)
- redirect_parsed = urlparse(url)
+ # Rebuild auth and proxy information.
+ proxies = self.rebuild_proxies(prepared_request, proxies)
+ self.rebuild_auth(prepared_request, resp)
- if (original_parsed.hostname != redirect_parsed.hostname):
- del headers['Authorization']
-
- # .netrc might have more auth for us.
- new_auth = get_netrc_auth(url) if self.trust_env else None
- if new_auth is not None:
- prepared_request.prepare_auth(new_auth)
+ # Override the original request.
+ req = prepared_request
resp = self.send(
- prepared_request,
+ req,
stream=stream,
timeout=timeout,
verify=verify,
@@ -183,6 +197,68 @@ class SessionRedirectMixin(object):
i += 1
yield resp
+ def rebuild_auth(self, prepared_request, response):
+ """
+ When being redirected we may want to strip authentication from the
+ request to avoid leaking credentials. This method intelligently removes
+ and reapplies authentication where possible to avoid credential loss.
+ """
+ headers = prepared_request.headers
+ url = prepared_request.url
+
+ if 'Authorization' in headers:
+ # If we get redirected to a new host, we should strip out any
+ # authentication headers.
+ original_parsed = urlparse(response.request.url)
+ redirect_parsed = urlparse(url)
+
+ if (original_parsed.hostname != redirect_parsed.hostname):
+ del headers['Authorization']
+
+ # .netrc might have more auth for us on our new host.
+ new_auth = get_netrc_auth(url) if self.trust_env else None
+ if new_auth is not None:
+ prepared_request.prepare_auth(new_auth)
+
+ return
+
+ def rebuild_proxies(self, prepared_request, proxies):
+ """
+ This method re-evaluates the proxy configuration by considering the
+ environment variables. If we are redirected to a URL covered by
+ NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
+ proxy keys for this URL (in case they were stripped by a previous
+ redirect).
+
+ This method also replaces the Proxy-Authorization header where
+ necessary.
+ """
+ headers = prepared_request.headers
+ url = prepared_request.url
+ scheme = urlparse(url).scheme
+ new_proxies = proxies.copy() if proxies is not None else {}
+
+ if self.trust_env and not should_bypass_proxies(url):
+ environ_proxies = get_environ_proxies(url)
+
+ proxy = environ_proxies.get(scheme)
+
+ if proxy:
+ new_proxies.setdefault(scheme, environ_proxies[scheme])
+
+ if 'Proxy-Authorization' in headers:
+ del headers['Proxy-Authorization']
+
+ try:
+ username, password = get_auth_from_url(new_proxies[scheme])
+ except KeyError:
+ username, password = None, None
+
+ if username and password:
+ headers['Proxy-Authorization'] = _basic_auth_str(username, password)
+
+ return new_proxies
+
class Session(SessionRedirectMixin):
"""A Requests session.
@@ -198,9 +274,10 @@ class Session(SessionRedirectMixin):
"""
__attrs__ = [
- 'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
- 'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream',
- 'trust_env', 'max_redirects']
+ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
+ 'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
+ 'max_redirects',
+ ]
def __init__(self):
@@ -253,6 +330,9 @@ class Session(SessionRedirectMixin):
self.mount('https://', HTTPAdapter())
self.mount('http://', HTTPAdapter())
+ # Only store 1000 redirects to prevent using infinite memory
+ self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
+
def __enter__(self):
return self
@@ -290,6 +370,7 @@ class Session(SessionRedirectMixin):
url=request.url,
files=request.files,
data=request.data,
+ json=request.json,
headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
params=merge_setting(request.params, self.params),
auth=merge_setting(auth, self.auth),
@@ -311,7 +392,8 @@ class Session(SessionRedirectMixin):
hooks=None,
stream=None,
verify=None,
- cert=None):
+ cert=None,
+ json=None):
"""Constructs a :class:`Request `, prepares it and sends it.
Returns :class:`Response ` object.
@@ -321,17 +403,22 @@ class Session(SessionRedirectMixin):
string for the :class:`Request`.
:param data: (optional) Dictionary or bytes to send in the body of the
:class:`Request`.
+ :param json: (optional) json to send in the body of the
+ :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the
:class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the
:class:`Request`.
- :param files: (optional) Dictionary of 'filename': file-like-objects
+ :param files: (optional) Dictionary of ``'filename': file-like-objects``
for multipart encoding upload.
:param auth: (optional) Auth tuple or callable to enable
Basic/Digest/Custom HTTP Auth.
- :param timeout: (optional) Float describing the timeout of the
- request in seconds.
- :param allow_redirects: (optional) Boolean. Set to True by default.
+ :param timeout: (optional) How long to wait for the server to send
+        data before giving up, as a float, or a (connect timeout, read
+            timeout) tuple.
+ :type timeout: float or tuple
+ :param allow_redirects: (optional) Set to True by default.
+ :type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of
the proxy.
:param stream: (optional) whether to immediately download the response
@@ -342,7 +429,7 @@ class Session(SessionRedirectMixin):
If Tuple, ('cert', 'key') pair.
"""
- method = builtin_str(method)
+ method = to_native_string(method)
# Create the Request.
req = Request(
@@ -351,6 +438,7 @@ class Session(SessionRedirectMixin):
headers = headers,
files = files,
data = data or {},
+ json = json,
params = params or {},
auth = auth,
cookies = cookies,
@@ -360,36 +448,16 @@ class Session(SessionRedirectMixin):
proxies = proxies or {}
- # Gather clues from the surrounding environment.
- if self.trust_env:
- # Set environment's proxies.
- env_proxies = get_environ_proxies(url) or {}
- for (k, v) in env_proxies.items():
- proxies.setdefault(k, v)
-
- # Look for configuration.
- if not verify and verify is not False:
- verify = os.environ.get('REQUESTS_CA_BUNDLE')
-
- # Curl compatibility.
- if not verify and verify is not False:
- verify = os.environ.get('CURL_CA_BUNDLE')
-
- # Merge all the kwargs.
- proxies = merge_setting(proxies, self.proxies)
- stream = merge_setting(stream, self.stream)
- verify = merge_setting(verify, self.verify)
- cert = merge_setting(cert, self.cert)
+ settings = self.merge_environment_settings(
+ prep.url, proxies, stream, verify, cert
+ )
# Send the request.
send_kwargs = {
- 'stream': stream,
'timeout': timeout,
- 'verify': verify,
- 'cert': cert,
- 'proxies': proxies,
'allow_redirects': allow_redirects,
}
+ send_kwargs.update(settings)
resp = self.send(prep, **send_kwargs)
return resp
@@ -424,15 +492,16 @@ class Session(SessionRedirectMixin):
kwargs.setdefault('allow_redirects', False)
return self.request('HEAD', url, **kwargs)
- def post(self, url, data=None, **kwargs):
+ def post(self, url, data=None, json=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
+ :param json: (optional) json to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
- return self.request('POST', url, data=data, **kwargs)
+ return self.request('POST', url, data=data, json=json, **kwargs)
def put(self, url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object.
@@ -477,6 +546,14 @@ class Session(SessionRedirectMixin):
if not isinstance(request, PreparedRequest):
raise ValueError('You can only send PreparedRequests.')
+ checked_urls = set()
+ while request.url in self.redirect_cache:
+ checked_urls.add(request.url)
+ new_url = self.redirect_cache.get(request.url)
+ if new_url in checked_urls:
+ break
+ request.url = new_url
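+        # e.g. a cached permanent-redirect chain a -> b -> c rewrites the
+        # request straight to c; the checked_urls set breaks an a -> b -> a
+        # loop.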
+
# Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
@@ -527,10 +604,37 @@ class Session(SessionRedirectMixin):
history.insert(0, r)
# Get the last request made
r = history.pop()
- r.history = tuple(history)
+ r.history = history
+
+ if not stream:
+ r.content
return r
+ def merge_environment_settings(self, url, proxies, stream, verify, cert):
+ """Check the environment and merge it with some settings."""
+ # Gather clues from the surrounding environment.
+ if self.trust_env:
+ # Set environment's proxies.
+ env_proxies = get_environ_proxies(url) or {}
+ for (k, v) in env_proxies.items():
+ proxies.setdefault(k, v)
+
+ # Look for requests environment configuration and be compatible
+ # with cURL.
+ if verify is True or verify is None:
+ verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
+ os.environ.get('CURL_CA_BUNDLE'))
+
+ # Merge all the kwargs.
+ proxies = merge_setting(proxies, self.proxies)
+ stream = merge_setting(stream, self.stream)
+ verify = merge_setting(verify, self.verify)
+ cert = merge_setting(cert, self.cert)
+
+ return {'verify': verify, 'proxies': proxies, 'stream': stream,
+ 'cert': cert}
+
def get_adapter(self, url):
"""Returns the appropriate connnection adapter for the given URL."""
for (prefix, adapter) in self.adapters.items():
@@ -558,12 +662,19 @@ class Session(SessionRedirectMixin):
self.adapters[key] = self.adapters.pop(key)
def __getstate__(self):
- return dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
+ state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
+ state['redirect_cache'] = dict(self.redirect_cache)
+ return state
def __setstate__(self, state):
+ redirect_cache = state.pop('redirect_cache', {})
for attr, value in state.items():
setattr(self, attr, value)
+ self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
+ for redirect, to in redirect_cache.items():
+ self.redirect_cache[redirect] = to
+
def session():
"""Returns a :class:`Session` for context-management."""
diff --git a/lib/requests/status_codes.py b/lib/requests/status_codes.py
index ed7a8660..e0887f21 100644
--- a/lib/requests/status_codes.py
+++ b/lib/requests/status_codes.py
@@ -30,7 +30,8 @@ _codes = {
305: ('use_proxy',),
306: ('switch_proxy',),
307: ('temporary_redirect', 'temporary_moved', 'temporary'),
- 308: ('resume_incomplete', 'resume'),
+ 308: ('permanent_redirect',
+ 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0
# Client Error.
400: ('bad_request', 'bad'),
diff --git a/lib/requests/structures.py b/lib/requests/structures.py
index a1759137..3e5f2faa 100644
--- a/lib/requests/structures.py
+++ b/lib/requests/structures.py
@@ -8,30 +8,7 @@ Data structures that power Requests.
"""
-import os
import collections
-from itertools import islice
-
-
-class IteratorProxy(object):
- """docstring for IteratorProxy"""
- def __init__(self, i):
- self.i = i
- # self.i = chain.from_iterable(i)
-
- def __iter__(self):
- return self.i
-
- def __len__(self):
- if hasattr(self.i, '__len__'):
- return len(self.i)
- if hasattr(self.i, 'len'):
- return self.i.len
- if hasattr(self.i, 'fileno'):
- return os.fstat(self.i.fileno()).st_size
-
- def read(self, n):
- return "".join(islice(self.i, None, n))
class CaseInsensitiveDict(collections.MutableMapping):
@@ -46,7 +23,7 @@ class CaseInsensitiveDict(collections.MutableMapping):
case of the last key to be set, and ``iter(instance)``,
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
will contain case-sensitive keys. However, querying and contains
- testing is case insensitive:
+ testing is case insensitive::
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
@@ -106,8 +83,7 @@ class CaseInsensitiveDict(collections.MutableMapping):
return CaseInsensitiveDict(self._store.values())
def __repr__(self):
- return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
-
+ return str(dict(self.items()))
class LookupDict(dict):
"""Dictionary lookup object."""
diff --git a/lib/requests/utils.py b/lib/requests/utils.py
index 4d648bc5..74679414 100644
--- a/lib/requests/utils.py
+++ b/lib/requests/utils.py
@@ -19,15 +19,16 @@ import re
import sys
import socket
import struct
+import warnings
from . import __version__
from . import certs
from .compat import parse_http_list as _parse_list_header
from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
- builtin_str, getproxies, proxy_bypass)
+ builtin_str, getproxies, proxy_bypass, urlunparse)
from .cookies import RequestsCookieJar, cookiejar_from_dict
from .structures import CaseInsensitiveDict
-from .exceptions import MissingSchema, InvalidURL
+from .exceptions import InvalidURL
_hush_pyflakes = (RequestsCookieJar,)
@@ -61,7 +62,7 @@ def super_len(o):
return os.fstat(fileno).st_size
if hasattr(o, 'getvalue'):
- # e.g. BytesIO, cStringIO.StringI
+ # e.g. BytesIO, cStringIO.StringIO
return len(o.getvalue())
@@ -114,7 +115,7 @@ def get_netrc_auth(url):
def guess_filename(obj):
"""Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None)
- if name and name[0] != '<' and name[-1] != '>':
+ if name and isinstance(name, builtin_str) and name[0] != '<' and name[-1] != '>':
return os.path.basename(name)
@@ -287,6 +288,11 @@ def get_encodings_from_content(content):
:param content: bytestring to extract encodings from.
"""
+ warnings.warn((
+ 'In requests 3.0, get_encodings_from_content will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
@@ -351,12 +357,14 @@ def get_unicode_from_response(r):
Tried:
1. charset from content-type
-
-    2. every encodings from ``<meta ... charset=XXX>``
-
- 3. fall back and replace all unicode characters
+ 2. fall back and replace all unicode characters
"""
+ warnings.warn((
+ 'In requests 3.0, get_unicode_from_response will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
tried_encodings = []
@@ -466,9 +474,10 @@ def is_valid_cidr(string_network):
return True
-def get_environ_proxies(url):
- """Return a dict of environment proxies."""
-
+def should_bypass_proxies(url):
+ """
+ Returns whether we should bypass proxies or not.
+ """
get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
# First check whether no_proxy is defined. If it is, check that the URL
@@ -486,13 +495,13 @@ def get_environ_proxies(url):
for proxy_ip in no_proxy:
if is_valid_cidr(proxy_ip):
if address_in_network(ip, proxy_ip):
- return {}
+ return True
else:
for host in no_proxy:
if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
# The URL does match something in no_proxy, so we don't want
# to apply the proxies on this URL.
- return {}
+ return True
# If the system proxy settings indicate that this URL should be bypassed,
# don't proxy.
@@ -506,12 +515,16 @@ def get_environ_proxies(url):
bypass = False
if bypass:
- return {}
+ return True
- # If we get here, we either didn't have no_proxy set or we're not going
- # anywhere that no_proxy applies to, and the system settings don't require
- # bypassing the proxy for the current URL.
- return getproxies()
+ return False
+
+def get_environ_proxies(url):
+ """Return a dict of environment proxies."""
+ if should_bypass_proxies(url):
+ return {}
+ else:
+ return getproxies()
def default_user_agent(name="python-requests"):
@@ -549,7 +562,8 @@ def default_headers():
return CaseInsensitiveDict({
'User-Agent': default_user_agent(),
'Accept-Encoding': ', '.join(('gzip', 'deflate')),
- 'Accept': '*/*'
+ 'Accept': '*/*',
+ 'Connection': 'keep-alive',
})
@@ -564,7 +578,7 @@ def parse_header_links(value):
replace_chars = " '\""
- for val in value.split(","):
+ for val in re.split(", *<", value):
try:
url, params = val.split(";", 1)
except ValueError:
@@ -622,13 +636,18 @@ def guess_json_utf(data):
return None
-def except_on_missing_scheme(url):
- """Given a URL, raise a MissingSchema exception if the scheme is missing.
- """
- scheme, netloc, path, params, query, fragment = urlparse(url)
+def prepend_scheme_if_needed(url, new_scheme):
+ '''Given a URL that may or may not have a scheme, prepend the given scheme.
+ Does not replace a present scheme with the one provided as an argument.'''
+ scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
- if not scheme:
- raise MissingSchema('Proxy URLs must have explicit schemes.')
+ # urlparse is a finicky beast, and sometimes decides that there isn't a
+ # netloc present. Assume that it's being over-cautious, and switch netloc
+ # and path if urlparse decided there was no netloc.
+ if not netloc:
+ netloc, path = path, netloc
+
+ return urlunparse((scheme, netloc, path, params, query, fragment))
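+
+# e.g. prepend_scheme_if_needed('example.com/path', 'http')
+#     -> 'http://example.com/path'
+# while an existing scheme is preserved:
+#     prepend_scheme_if_needed('https://example.com', 'http')
+#     -> 'https://example.com'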
def get_auth_from_url(url):
@@ -661,3 +680,18 @@ def to_native_string(string, encoding='ascii'):
out = string.decode(encoding)
return out
+
+
+def urldefragauth(url):
+ """
+ Given a url remove the fragment and the authentication part
+ """
+ scheme, netloc, path, params, query, fragment = urlparse(url)
+
+ # see func:`prepend_scheme_if_needed`
+ if not netloc:
+ netloc, path = path, netloc
+
+ netloc = netloc.rsplit('@', 1)[-1]
+
+ return urlunparse((scheme, netloc, path, params, query, ''))
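+
+# e.g. urldefragauth('http://user:pass@example.com/path#frag')
+#     -> 'http://example.com/path'  (auth and fragment stripped)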
diff --git a/lib/rtorrent/__init__.py b/lib/rtorrent/__init__.py
index 290ef115..c24f608f 100644
--- a/lib/rtorrent/__init__.py
+++ b/lib/rtorrent/__init__.py
@@ -22,15 +22,16 @@ import os.path
import time
import xmlrpclib
-from rtorrent.common import find_torrent, \
- is_valid_port, convert_version_tuple_to_str
-from rtorrent.lib.torrentparser import TorrentParser
-from rtorrent.lib.xmlrpc.http import HTTPServerProxy
-from rtorrent.lib.xmlrpc.scgi import SCGIServerProxy
-from rtorrent.rpc import Method
-from rtorrent.lib.xmlrpc.basic_auth import BasicAuthTransport
-from rtorrent.torrent import Torrent
-from rtorrent.group import Group
+from rtorrent.common import (find_torrent, # @UnresolvedImport
+ is_valid_port, # @UnresolvedImport
+ convert_version_tuple_to_str) # @UnresolvedImport
+from rtorrent.lib.torrentparser import TorrentParser # @UnresolvedImport
+from rtorrent.lib.xmlrpc.http import HTTPServerProxy # @UnresolvedImport
+from rtorrent.lib.xmlrpc.scgi import SCGIServerProxy # @UnresolvedImport
+from rtorrent.rpc import Method # @UnresolvedImport
+from rtorrent.lib.xmlrpc.requests_transport import RequestsTransport # @UnresolvedImport @IgnorePep8
+from rtorrent.torrent import Torrent # @UnresolvedImport
+from rtorrent.group import Group # @UnresolvedImport
import rtorrent.rpc # @UnresolvedImport
__version__ = "0.2.9"
@@ -43,11 +44,12 @@ MIN_RTORRENT_VERSION_STR = convert_version_tuple_to_str(MIN_RTORRENT_VERSION)
class RTorrent:
+
""" Create a new rTorrent connection """
rpc_prefix = None
def __init__(self, uri, username=None, password=None,
- verify=False, sp=None, sp_kwargs=None):
+ verify=False, sp=None, sp_kwargs=None, tp_kwargs=None):
self.uri = uri # : From X{__init__(self, url)}
self.username = username
@@ -59,6 +61,10 @@ class RTorrent:
self.sp = sp
elif self.schema in ['http', 'https']:
self.sp = HTTPServerProxy
+            self.isHttps = (self.schema == 'https')
elif self.schema == 'scgi':
self.sp = SCGIServerProxy
else:
@@ -66,6 +72,8 @@ class RTorrent:
self.sp_kwargs = sp_kwargs or {}
+ self.tp_kwargs = tp_kwargs or {}
+
self.torrents = [] # : List of L{Torrent} instances
self._rpc_methods = [] # : List of rTorrent RPC methods
self._torrent_cache = []
@@ -80,9 +88,30 @@ class RTorrent:
if self.schema == 'scgi':
raise NotImplementedError()
+        authtype = self.tp_kwargs.get('authtype')
+
+        check_ssl_cert = self.tp_kwargs.get('check_ssl_cert', True)
+
+        proxies = self.tp_kwargs.get('proxies')
+
return self.sp(
self.uri,
- transport=BasicAuthTransport(self.username, self.password),
+ transport=RequestsTransport(
+ use_https=self.isHttps,
+ authtype=authtype,
+ username=self.username,
+ password=self.password,
+ check_ssl_cert=check_ssl_cert,
+ proxies=proxies),
**self.sp_kwargs
)
@@ -90,8 +119,10 @@ class RTorrent:
def _verify_conn(self):
# check for rpc methods that should be available
- assert "system.client_version" in self._get_rpc_methods(), "Required RPC method not available."
- assert "system.library_version" in self._get_rpc_methods(), "Required RPC method not available."
+ assert "system.client_version" in self._get_rpc_methods(
+ ), "Required RPC method not available."
+ assert "system.library_version" in self._get_rpc_methods(
+ ), "Required RPC method not available."
# minimum rTorrent version check
assert self._meets_version_requirement() is True,\
@@ -152,7 +183,8 @@ class RTorrent:
for result in results:
results_dict = {}
# build results_dict
- for m, r in zip(retriever_methods, result[1:]): # result[0] is the info_hash
+ # result[0] is the info_hash
+ for m, r in zip(retriever_methods, result[1:]):
results_dict[m.varname] = rtorrent.rpc.process_result(m, r)
self.torrents.append(
@@ -199,7 +231,7 @@ class RTorrent:
return(func_name)
- def load_magnet(self, magneturl, info_hash, start=False, verbose=False, verify_load=True):
+ def load_magnet(self, magneturl, info_hash, start=False, verbose=False, verify_load=True): # @IgnorePep8
p = self._get_conn()
@@ -231,13 +263,13 @@ class RTorrent:
while i < MAX_RETRIES:
for torrent in self.get_torrents():
if torrent.info_hash == info_hash:
- if str(info_hash) not in str(torrent.name) :
+ if str(info_hash) not in str(torrent.name):
time.sleep(1)
i += 1
return(torrent)
- def load_torrent(self, torrent, start=False, verbose=False, verify_load=True):
+ def load_torrent(self, torrent, start=False, verbose=False, verify_load=True): # @IgnorePep8
"""
Loads torrent into rTorrent (with various enhancements)
@@ -354,7 +386,7 @@ class RTorrent:
if persistent is True:
p.group.insert_persistent_view('', name)
else:
- assert view is not None, "view parameter required on non-persistent groups"
+ assert view is not None, "view parameter required on non-persistent groups" # @IgnorePep8
p.group.insert('', name, view)
self._update_rpc_methods()
diff --git a/lib/rtorrent/lib/xmlrpc/requests_transport.py b/lib/rtorrent/lib/xmlrpc/requests_transport.py
new file mode 100644
index 00000000..d5e28743
--- /dev/null
+++ b/lib/rtorrent/lib/xmlrpc/requests_transport.py
@@ -0,0 +1,188 @@
+# Copyright (c) 2013-2015 Alexandre Beloin,
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""A transport for Python2/3 xmlrpc library using requests
+
+Support:
+-SSL with Basic and Digest authentication
+-Proxies
+"""
+
+try:
+ import xmlrpc.client as xmlrpc_client
+except ImportError:
+ import xmlrpclib as xmlrpc_client
+
+import traceback
+
+import requests
+from requests.exceptions import RequestException
+from requests.auth import HTTPBasicAuth
+from requests.auth import HTTPDigestAuth
+from requests.packages.urllib3 import disable_warnings # @UnresolvedImport
+
+
+class RequestsTransport(xmlrpc_client.Transport):
+
+ """Transport class for xmlrpc using requests"""
+
+ def __init__(self, use_https=True, authtype=None, username=None,
+ password=None, check_ssl_cert=True, proxies=None):
+ """Inits RequestsTransport.
+
+ Args:
+ use_https: If true, https else http
+ authtype: None, basic or digest
+ username: Username
+ password: Password
+ check_ssl_cert: Check SSL certificate
+            proxies: A dict of proxies
+                (e.g. {"http": "http://10.10.1.10:3128",
+                       "https": "http://10.10.1.10:1080"})
+
+ Raises:
+ ValueError: Invalid info
+ """
+ # Python 2 can't use super on old style class.
+ if issubclass(xmlrpc_client.Transport, object):
+ super(RequestsTransport, self).__init__()
+ else:
+ xmlrpc_client.Transport.__init__(self)
+
+ self.user_agent = "Python Requests/" + requests.__version__
+
+ self._use_https = use_https
+ self._check_ssl_cert = check_ssl_cert
+
+ if authtype == "basic" or authtype == "digest":
+ self._authtype = authtype
+ else:
+ raise ValueError(
+ "Supported authentication are: basic and digest")
+ if authtype and (not username or not password):
+ raise ValueError(
+ "Username and password required when using authentication")
+
+ self._username = username
+ self._password = password
+ if proxies is None:
+ self._proxies = {}
+ else:
+ self._proxies = proxies
+
+ def request(self, host, handler, request_body, verbose=0):
+ """Replace the xmlrpc request function.
+
+ Process xmlrpc request via requests library.
+
+ Args:
+ host: Target host
+            handler: Target RPC handler.
+ request_body: XML-RPC request body.
+ verbose: Debugging flag.
+
+ Returns:
+ Parsed response.
+
+ Raises:
+ RequestException: Error in requests
+ """
+ if verbose:
+ self._debug()
+
+ if not self._check_ssl_cert:
+ disable_warnings()
+
+ headers = {'User-Agent': self.user_agent, 'Content-Type': 'text/xml', }
+
+        # This needs to be done because the scheme (http or https) is lost in
+ # xmlrpc.Transport's init.
+ if self._use_https:
+ url = "https://{host}/{handler}".format(host=host, handler=handler)
+ else:
+ url = "http://{host}/{handler}".format(host=host, handler=handler)
+
+ # TODO Construct kwargs query instead
+ try:
+ if self._authtype == "basic":
+ response = requests.post(
+ url,
+ data=request_body,
+ headers=headers,
+ verify=self._check_ssl_cert,
+ auth=HTTPBasicAuth(
+ self._username, self._password),
+ proxies=self._proxies)
+ elif self._authtype == "digest":
+ response = requests.post(
+ url,
+ data=request_body,
+ headers=headers,
+ verify=self._check_ssl_cert,
+ auth=HTTPDigestAuth(
+ self._username, self._password),
+ proxies=self._proxies)
+ else:
+ response = requests.post(
+ url,
+ data=request_body,
+ headers=headers,
+ verify=self._check_ssl_cert,
+ proxies=self._proxies)
+
+ response.raise_for_status()
+        except RequestException as error:
+            # str(error) rather than error.message (removed in Python 3),
+            # and error.response rather than the possibly-unbound local
+            # 'response' (requests.post may fail before assigning it).
+            raise xmlrpc_client.ProtocolError(
+                url,
+                str(error),
+                traceback.format_exc(),
+                getattr(error.response, 'headers', {}))
+
+ return self.parse_response(response)
+
+ def parse_response(self, response):
+ """Replace the xmlrpc parse_response function.
+
+ Parse response.
+
+ Args:
+ response: Requests return data
+
+ Returns:
+ Response tuple and target method.
+ """
+ p, u = self.getparser()
+ p.feed(response.text)
+ p.close()
+ return u.close()
+
+ def _debug(self):
+ """Debug requests module.
+
+ Enable verbose logging from requests
+ """
+ # TODO Ugly
+ import logging
+ try:
+ import http.client as http_client
+ except ImportError:
+ import httplib as http_client
+
+ http_client.HTTPConnection.debuglevel = 1
+
+ logging.basicConfig()
+ logging.getLogger().setLevel(logging.DEBUG)
+ requests_log = logging.getLogger("requests.packages.urllib3")
+ requests_log.setLevel(logging.DEBUG)
+ requests_log.propagate = True
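+
+# A minimal usage sketch (host and credentials are illustrative):
+#
+#   transport = RequestsTransport(use_https=False, authtype="basic",
+#                                 username="user", password="secret")
+#   server = xmlrpc_client.ServerProxy("http://localhost:8080/RPC2",
+#                                      transport=transport)
+#   server.system.listMethods()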
diff --git a/lib/trakt/trakt.py b/lib/trakt/trakt.py
index 90533afe..d2f615a6 100644
--- a/lib/trakt/trakt.py
+++ b/lib/trakt/trakt.py
@@ -9,7 +9,7 @@ class TraktAPI():
self.username = username
self.password = password
self.verify = not disable_ssl_verify
- self.timeout = timeout
+ self.timeout = timeout if timeout else None
self.api_url = 'https://api.trakt.tv/'
self.headers = {
'Content-Type': 'application/json',
@@ -29,7 +29,7 @@ class TraktAPI():
data=json.dumps(data), timeout=self.timeout, verify=self.verify)
resp.raise_for_status()
resp = resp.json()
- except (requests.HTTPError, requests.ConnectionError) as e:
+ except requests.RequestException as e:
code = getattr(e.response, 'status_code', None)
if not code:
# This is pretty much a fatal error if there is no status_code
@@ -68,7 +68,7 @@ class TraktAPI():
# convert response to json
resp = resp.json()
- except (requests.HTTPError, requests.ConnectionError) as e:
+ except requests.RequestException as e:
code = getattr(e.response, 'status_code', None)
if not code:
# This is pretty much a fatal error if there is no status_code
diff --git a/lib/tvdb_api/tvdb_api.py b/lib/tvdb_api/tvdb_api.py
index 7c7ee02d..fadbcee5 100644
--- a/lib/tvdb_api/tvdb_api.py
+++ b/lib/tvdb_api/tvdb_api.py
@@ -875,6 +875,9 @@ class Tvdb:
log().debug('Series results incomplete')
raise tvdb_showincomplete("Show search returned incomplete results (cannot find complete show on TVDB)")
+ if 'episode' not in epsEt:
+ return False
+
episodes = epsEt["episode"]
if not isinstance(episodes, list):
episodes = [episodes]
diff --git a/lib/tvrage_api/tvrage_api.py b/lib/tvrage_api/tvrage_api.py
index 8ed78223..d7a93a90 100644
--- a/lib/tvrage_api/tvrage_api.py
+++ b/lib/tvrage_api/tvrage_api.py
@@ -626,6 +626,9 @@ class TVRage:
raise tvrage_showincomplete(
"Show search returned incomplete results (cannot find complete show on TVRAGE)")
+ if 'episodelist' not in epsEt:
+ return False
+
seasons = epsEt['episodelist']['season']
if not isinstance(seasons, list):
seasons = [seasons]
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py
index 91f83b7b..bf089f9e 100755
--- a/sickbeard/__init__.py
+++ b/sickbeard/__init__.py
@@ -32,9 +32,9 @@ from github import Github
from sickbeard import providers, metadata, config, webserveInit
from sickbeard.providers.generic import GenericProvider
-from providers import ezrss, btn, newznab, womble, thepiratebay, torrentleech, kat, iptorrents, \
- omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, tntvillage, \
- freshontv, bitsoup, t411, tokyotoshokan
+from providers import ezrss, btn, newznab, womble, thepiratebay, oldpiratebay, torrentleech, kat, iptorrents, \
+ omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, nextgen, speedcd, nyaatorrents, fanzub, torrentbytes, animezb, \
+ freshontv, bitsoup, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
naming_ep_type
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
@@ -52,6 +52,9 @@ from sickbeard.databases import mainDB, cache_db, failed_db
from lib.configobj import ConfigObj
+from lib import requests
+requests.packages.urllib3.disable_warnings()
+
PID = None
CFG = None
@@ -137,6 +140,7 @@ WEB_USERNAME = None
WEB_PASSWORD = None
WEB_HOST = None
WEB_IPV6 = None
+WEB_COOKIE_SECRET = None
PLAY_VIDEOS = False
@@ -220,6 +224,7 @@ UPDATE_FREQUENCY = None
DAILYSEARCH_STARTUP = False
BACKLOG_FREQUENCY = None
BACKLOG_STARTUP = False
+SHOWUPDATE_HOUR = 3
DEFAULT_AUTOPOSTPROCESSER_FREQUENCY = 10
DEFAULT_DAILYSEARCH_FREQUENCY = 40
@@ -287,6 +292,7 @@ TORRENT_LABEL = ''
TORRENT_LABEL_ANIME = ''
TORRENT_VERIFY_CERT = False
TORRENT_RPCURL = 'transmission'
+TORRENT_AUTH_TYPE = 'none'
USE_KODI = False
KODI_ALWAYS_ON = True
@@ -402,6 +408,7 @@ TRAKT_USE_RECOMMENDED = False
TRAKT_SYNC = False
TRAKT_DEFAULT_INDEXER = None
TRAKT_DISABLE_SSL_VERIFY = False
+TRAKT_TIMEOUT = 30
USE_PYTIVO = False
PYTIVO_NOTIFY_ONSNATCH = False
@@ -497,14 +504,14 @@ def get_backlog_cycle_time():
def initialize(consoleLogging=True):
with INIT_LOCK:
- global BRANCH, GIT_RESET, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, ACTUAL_LOG_DIR, LOG_DIR, LOG_NR, LOG_SIZE, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, API_KEY, API_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
+ global BRANCH, GIT_RESET, GIT_REMOTE, GIT_REMOTE_URL, CUR_COMMIT_HASH, CUR_COMMIT_BRANCH, ACTUAL_LOG_DIR, LOG_DIR, LOG_NR, LOG_SIZE, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, WEB_COOKIE_SECRET, API_KEY, API_ROOT, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, RANDOMIZE_PROVIDERS, CHECK_PROPERS_INTERVAL, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_CATEGORY_ANIME, SAB_HOST, \
NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_CATEGORY_ANIME, NZBGET_PRIORITY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \
- TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, TORRENT_LABEL_ANIME, TORRENT_VERIFY_CERT, TORRENT_RPCURL, \
+ TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, TORRENT_LABEL_ANIME, TORRENT_VERIFY_CERT, TORRENT_RPCURL, TORRENT_AUTH_TYPE, \
USE_KODI, KODI_ALWAYS_ON, KODI_NOTIFY_ONSNATCH, KODI_NOTIFY_ONDOWNLOAD, KODI_NOTIFY_ONSUBTITLEDOWNLOAD, KODI_UPDATE_FULL, KODI_UPDATE_ONLYFIRST, \
KODI_UPDATE_LIBRARY, KODI_HOST, KODI_USERNAME, KODI_PASSWORD, BACKLOG_FREQUENCY, \
- USE_TRAKT, TRAKT_USERNAME, TRAKT_PASSWORD, TRAKT_REMOVE_WATCHLIST, TRAKT_USE_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, TRAKT_USE_RECOMMENDED, TRAKT_SYNC, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_DISABLE_SSL_VERIFY, \
+ USE_TRAKT, TRAKT_USERNAME, TRAKT_PASSWORD, TRAKT_REMOVE_WATCHLIST, TRAKT_USE_WATCHLIST, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, TRAKT_USE_RECOMMENDED, TRAKT_SYNC, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_DISABLE_SSL_VERIFY, TRAKT_TIMEOUT, \
USE_PLEX, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, \
PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, DEFAULT_BACKLOG_FREQUENCY, MIN_BACKLOG_FREQUENCY, BACKLOG_STARTUP, SKIP_REMOVED_FILES, \
showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, UPDATE_SHOWS_ON_START, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, SORT_ARTICLE, showList, loadingShowList, \
@@ -536,7 +543,7 @@ def initialize(consoleLogging=True):
METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, REQUIRE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \
ADD_SHOWS_WO_DIR, USE_SUBTITLES, SUBTITLES_LANGUAGES, SUBTITLES_DIR, SUBTITLES_SERVICES_LIST, SUBTITLES_SERVICES_ENABLED, SUBTITLES_HISTORY, SUBTITLES_FINDER_FREQUENCY, SUBTITLES_MULTI, subtitlesFinderScheduler, \
USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, PROXY_INDEXERS, \
- AUTOPOSTPROCESSER_FREQUENCY, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
+ AUTOPOSTPROCESSER_FREQUENCY, SHOWUPDATE_HOUR, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
ANIME_SPLIT_HOME, SCENE_DEFAULT, PLAY_VIDEOS, DOWNLOAD_URL, BACKLOG_DAYS, GIT_ORG, GIT_REPO, GIT_USERNAME, GIT_PASSWORD, \
GIT_AUTOISSUES, gh
@@ -650,6 +657,9 @@ def initialize(consoleLogging=True):
ENCRYPTION_VERSION = check_setting_int(CFG, 'General', 'encryption_version', 0)
WEB_USERNAME = check_setting_str(CFG, 'General', 'web_username', '', censor_log=True)
WEB_PASSWORD = check_setting_str(CFG, 'General', 'web_password', '', censor_log=True)
+ WEB_COOKIE_SECRET = check_setting_str(CFG, 'General', 'web_cookie_secret', helpers.generateCookieSecret(), censor_log=True)
+ if not WEB_COOKIE_SECRET:
+ WEB_COOKIE_SECRET = helpers.generateCookieSecret()
LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
PLAY_VIDEOS = bool(check_setting_int(CFG, 'General', 'play_videos', 0))
@@ -759,6 +769,10 @@ def initialize(consoleLogging=True):
if UPDATE_FREQUENCY < MIN_UPDATE_FREQUENCY:
UPDATE_FREQUENCY = MIN_UPDATE_FREQUENCY
+ SHOWUPDATE_HOUR = check_setting_int(CFG, 'General', 'showupdate_hour', 3)
+        # Clamp to a valid hour of the day.
+        if SHOWUPDATE_HOUR > 23 or SHOWUPDATE_HOUR < 0:
+            SHOWUPDATE_HOUR = 0
+
BACKLOG_DAYS = check_setting_int(CFG, 'General', 'backlog_days', 7)
NZB_DIR = check_setting_str(CFG, 'Blackhole', 'nzb_dir', '')
@@ -811,6 +825,7 @@ def initialize(consoleLogging=True):
TORRENT_LABEL_ANIME = check_setting_str(CFG, 'TORRENT', 'torrent_label_anime', '')
TORRENT_VERIFY_CERT = bool(check_setting_int(CFG, 'TORRENT', 'torrent_verify_cert', 0))
TORRENT_RPCURL = check_setting_str(CFG, 'TORRENT', 'torrent_rpcurl', 'transmission')
+ TORRENT_AUTH_TYPE = check_setting_str(CFG, 'TORRENT', 'torrent_auth_type', '')
USE_KODI = bool(check_setting_int(CFG, 'KODI', 'use_kodi', 0))
KODI_ALWAYS_ON = bool(check_setting_int(CFG, 'KODI', 'kodi_always_on', 1))
@@ -922,6 +937,7 @@ def initialize(consoleLogging=True):
TRAKT_SYNC = bool(check_setting_int(CFG, 'Trakt', 'trakt_sync', 0))
TRAKT_DEFAULT_INDEXER = check_setting_int(CFG, 'Trakt', 'trakt_default_indexer', 1)
TRAKT_DISABLE_SSL_VERIFY = bool(check_setting_int(CFG, 'Trakt', 'trakt_disable_ssl_verify', 0))
+ TRAKT_TIMEOUT = check_setting_int(CFG, 'Trakt', 'trakt_timeout', 30)
CheckSection(CFG, 'pyTivo')
USE_PYTIVO = bool(check_setting_int(CFG, 'pyTivo', 'use_pytivo', 0))
@@ -1193,7 +1209,7 @@ def initialize(consoleLogging=True):
showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
cycleTime=datetime.timedelta(hours=1),
threadName="SHOWUPDATER",
- start_time=datetime.time(hour=3)) # 3 AM
+                                                  start_time=datetime.time(hour=SHOWUPDATE_HOUR))  # defaults to 3 AM
# searchers
searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
@@ -1464,6 +1480,7 @@ def save_config():
new_config['General']['web_root'] = WEB_ROOT
new_config['General']['web_username'] = WEB_USERNAME
new_config['General']['web_password'] = helpers.encrypt(WEB_PASSWORD, ENCRYPTION_VERSION)
+ new_config['General']['web_cookie_secret'] = WEB_COOKIE_SECRET
new_config['General']['play_videos'] = int(PLAY_VIDEOS)
new_config['General']['download_url'] = DOWNLOAD_URL
new_config['General']['localhost_ip'] = LOCALHOST_IP
@@ -1484,6 +1501,7 @@ def save_config():
new_config['General']['dailysearch_frequency'] = int(DAILYSEARCH_FREQUENCY)
new_config['General']['backlog_frequency'] = int(BACKLOG_FREQUENCY)
new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY)
+ new_config['General']['showupdate_hour'] = int(SHOWUPDATE_HOUR)
new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS)
new_config['General']['randomize_providers'] = int(RANDOMIZE_PROVIDERS)
new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL
@@ -1687,6 +1705,7 @@ def save_config():
new_config['TORRENT']['torrent_label_anime'] = TORRENT_LABEL_ANIME
new_config['TORRENT']['torrent_verify_cert'] = int(TORRENT_VERIFY_CERT)
new_config['TORRENT']['torrent_rpcurl'] = TORRENT_RPCURL
+ new_config['TORRENT']['torrent_auth_type'] = TORRENT_AUTH_TYPE
new_config['KODI'] = {}
new_config['KODI']['use_kodi'] = int(USE_KODI)
@@ -1808,6 +1827,7 @@ def save_config():
new_config['Trakt']['trakt_sync'] = int(TRAKT_SYNC)
new_config['Trakt']['trakt_default_indexer'] = int(TRAKT_DEFAULT_INDEXER)
new_config['Trakt']['trakt_disable_ssl_verify'] = int(TRAKT_DISABLE_SSL_VERIFY)
+ new_config['Trakt']['trakt_timeout'] = int(TRAKT_TIMEOUT)
new_config['pyTivo'] = {}
new_config['pyTivo']['use_pytivo'] = int(USE_PYTIVO)
diff --git a/sickbeard/clients/download_station.py b/sickbeard/clients/download_station.py
index 62500ecc..25dc9fce 100644
--- a/sickbeard/clients/download_station.py
+++ b/sickbeard/clients/download_station.py
@@ -36,7 +36,7 @@ class DownloadStationAPI(GenericClient):
auth_url = self.host + 'webapi/auth.cgi?api=SYNO.API.Auth&version=2&method=login&account=' + self.username + '&passwd=' + self.password + '&session=DownloadStation&format=sid'
try:
- self.response = self.session.get(auth_url)
+ self.response = self.session.get(auth_url, verify=False)
self.auth = self.response.json()['data']['sid']
except:
return None
diff --git a/sickbeard/clients/rtorrent.py b/sickbeard/clients/rtorrent.py
index 82f76046..3a8a865d 100644
--- a/sickbeard/clients/rtorrent.py
+++ b/sickbeard/clients/rtorrent.py
@@ -37,8 +37,15 @@ class rTorrentAPI(GenericClient):
if not self.host:
return
+ tp_kwargs = {}
+        if sickbeard.TORRENT_AUTH_TYPE != 'none':
+ tp_kwargs['authtype'] = sickbeard.TORRENT_AUTH_TYPE
+
+ if not sickbeard.TORRENT_VERIFY_CERT:
+ tp_kwargs['check_ssl_cert'] = False
+
if self.username and self.password:
- self.auth = RTorrent(self.host, self.username, self.password)
+ self.auth = RTorrent(self.host, self.username, self.password, True, tp_kwargs=tp_kwargs)
else:
self.auth = RTorrent(self.host, None, None, True)
diff --git a/sickbeard/clients/utorrent.py b/sickbeard/clients/utorrent.py
index fd9adc2e..58b4e029 100644
--- a/sickbeard/clients/utorrent.py
+++ b/sickbeard/clients/utorrent.py
@@ -32,8 +32,14 @@ class uTorrentAPI(GenericClient):
def _request(self, method='get', params={}, files=None):
- params.update({'token': self.auth})
- return super(uTorrentAPI, self)._request(method=method, params=params, files=files)
+        # Workaround for uTorrent 2.2.1: the token parameter has to come
+        # first. An OrderedDict would be the natural fit, but it is only
+        # available from Python 2.7+ and SickRage supports 2.6+.
+        ordered_params = {'token': self.auth}
+
+        for k, v in params.iteritems():
+ ordered_params.update({k: v})
+
+ return super(uTorrentAPI, self)._request(method=method, params=ordered_params, files=files)
def _get_auth(self):
diff --git a/sickbeard/dailysearcher.py b/sickbeard/dailysearcher.py
index d7e84b40..d54e4d2d 100644
--- a/sickbeard/dailysearcher.py
+++ b/sickbeard/dailysearcher.py
@@ -49,7 +49,7 @@ class DailySearcher():
if network_timezones.network_dict:
curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
else:
- curDate = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()
+ curDate = (datetime.date.today() + datetime.timedelta(days=2)).toordinal()
curTime = datetime.datetime.now(network_timezones.sb_timezone)
diff --git a/sickbeard/failedProcessor.py b/sickbeard/failedProcessor.py
index 80255e97..6c1245bb 100644
--- a/sickbeard/failedProcessor.py
+++ b/sickbeard/failedProcessor.py
@@ -67,7 +67,7 @@ class FailedProcessor(object):
for episode in parsed.episode_numbers:
segment = parsed.show.getEpisode(parsed.season_number, episode)
- cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, segment)
+ cur_failed_queue_item = search_queue.FailedQueueItem(parsed.show, [segment])
sickbeard.searchQueueScheduler.action.add_item(cur_failed_queue_item)
return True
diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py
index 547fdddd..262e3382 100644
--- a/sickbeard/helpers.py
+++ b/sickbeard/helpers.py
@@ -19,6 +19,7 @@
from __future__ import with_statement
import os
+import ctypes
import random
import re
import shutil
@@ -35,6 +36,7 @@ import uuid
import base64
import zipfile
import datetime
+import errno
import sickbeard
import subliminal
@@ -43,6 +45,8 @@ import requests
import requests.exceptions
import xmltodict
+import subprocess
+
from sickbeard.exceptions import MultipleShowObjectsException, ex
from sickbeard import logger, classes
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions
@@ -343,7 +347,7 @@ def listMediaFiles(path):
def copyFile(srcFile, destFile):
if isPosix():
- os.system('cp "%s" "%s"' % (srcFile, destFile))
+ subprocess.call(['cp', srcFile, destFile])
else:
ek.ek(shutil.copyfile, srcFile, destFile)
@@ -666,21 +670,26 @@ def get_all_episodes_from_absolute_number(show, absolute_numbers, indexer_id=Non
return (season, episodes)
-def sanitizeSceneName(name, ezrss=False):
+def sanitizeSceneName(name, ezrss=False, anime=False):
"""
Takes a show name and returns the "scenified" version of it.
ezrss: If true the scenified version will follow EZRSS's cracksmoker rules as best as possible
+
+    anime: Some shows have a ' in their name (e.g. Kuroko's Basketball) that is needed for searching.
Returns: A string containing the scene version of the show name given.
"""
if name:
- if not ezrss:
- bad_chars = u",:()'!?\u2019"
+        # anime: keep the ' (shows like Kuroko's Basketball need it for search)
+ if anime:
+ bad_chars = u",:()!?\u2019"
# ezrss leaves : and ! in their show names as far as I can tell
- else:
+ elif ezrss:
bad_chars = u",()'?\u2019"
+ else:
+ bad_chars = u",:()'!?\u2019"
# strip out any bad chars
for x in bad_chars:
@@ -943,8 +952,21 @@ def is_hidden_folder(folder):
On Linux based systems hidden folders start with . (dot)
folder: Full path of folder to check
"""
+ def is_hidden(filepath):
+ name = os.path.basename(os.path.abspath(filepath))
+ return name.startswith('.') or has_hidden_attribute(filepath)
+
+ def has_hidden_attribute(filepath):
+ try:
+ attrs = ctypes.windll.kernel32.GetFileAttributesW(unicode(filepath))
+ assert attrs != -1
+ result = bool(attrs & 2)
+ except (AttributeError, AssertionError):
+ result = False
+ return result
+
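+    # On Windows, bit 0x2 (FILE_ATTRIBUTE_HIDDEN) of GetFileAttributesW marks
+    # a hidden file; elsewhere ctypes.windll does not exist and the
+    # AttributeError fallback leaves only the leading-dot check.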
if ek.ek(os.path.isdir, folder):
- if ek.ek(os.path.basename, folder).startswith('.'):
+ if is_hidden(folder):
return True
return False
@@ -1108,8 +1130,13 @@ def touchFile(fname, atime=None):
with file(fname, 'a'):
os.utime(fname, (atime, atime))
return True
- except:
- logger.log(u"File air date stamping not available on your OS", logger.DEBUG)
+ except Exception as e:
+            # Not every exception carries errno, so look it up defensively.
+            if getattr(e, 'errno', None) == errno.ENOSYS:
+                logger.log(u"File air date stamping not available on your OS", logger.DEBUG)
+            elif getattr(e, 'errno', None) == errno.EACCES:
+                logger.log(u"File air date stamping failed (permission denied). Check permissions for file: {0}".format(fname), logger.ERROR)
+            else:
+                logger.log(u"File air date stamping failed. The error is: {0} and the message is: {1}.".format(getattr(e, 'errno', None), getattr(e, 'strerror', e)), logger.ERROR)
pass
return False
@@ -1336,3 +1363,7 @@ if __name__ == '__main__':
def remove_article(text=''):
return re.sub(r'(?i)^(?:(?:A(?!\s+to)n?)|The)\s(\w)', r'\1', text)
+
+def generateCookieSecret():
+    return base64.b64encode(uuid.uuid4().bytes + uuid.uuid4().bytes)
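+
+# e.g. generateCookieSecret() -> a 44-character base64 string derived from two
+# random UUIDs (32 bytes of entropy); regenerated only when no secret exists.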
diff --git a/sickbeard/logger.py b/sickbeard/logger.py
index 1d289f1d..303d3cc1 100644
--- a/sickbeard/logger.py
+++ b/sickbeard/logger.py
@@ -171,8 +171,9 @@ class Logger(object):
if match:
level = match.group(2)
if reverseNames[level] == ERROR:
- paste_data = "".join(log_data[i:50])
- gist = gh.get_user().create_gist(True, {"sickrage.log": InputFileContent(paste_data)})
+ paste_data = "".join(log_data[i:i+50])
+ if paste_data:
+ gist = gh.get_user().create_gist(True, {"sickrage.log": InputFileContent(paste_data)})
break
message = u"### INFO\n"
@@ -189,7 +190,7 @@ class Logger(object):
message += u"---\n"
message += u"_STAFF NOTIFIED_: @SiCKRAGETV/owners @SiCKRAGETV/moderators"
- issue = gh.get_organization(gh_org).get_repo(gh_repo).create_issue("[APP SUBMITTED]: " + curError.title, message)
+ issue = gh.get_organization(gh_org).get_repo(gh_repo).create_issue("[APP SUBMITTED]: " + str(curError.title), message)
if issue:
self.log('Your issue ticket #%s was submitted successfully!' % issue.number)
diff --git a/sickbeard/notifiers/trakt.py b/sickbeard/notifiers/trakt.py
index 2d0e8526..2796f1f8 100644
--- a/sickbeard/notifiers/trakt.py
+++ b/sickbeard/notifiers/trakt.py
@@ -47,7 +47,7 @@ class TraktNotifier:
"""
trakt_id = sickbeard.indexerApi(ep_obj.show.indexer).config['trakt_id']
- trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY)
+ trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)
if sickbeard.USE_TRAKT:
try:
@@ -117,7 +117,7 @@ class TraktNotifier:
Returns: True if the request succeeded, False otherwise
"""
try:
- trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, username, password, disable_ssl)
+ trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, username, password, disable_ssl, sickbeard.TRAKT_TIMEOUT)
trakt_api.validateAccount()
return "Test notice sent successfully to Trakt"
except (traktException, traktAuthException, traktServerBusy) as e:
diff --git a/sickbeard/postProcessor.py b/sickbeard/postProcessor.py
index d4d2b03f..e3f23023 100644
--- a/sickbeard/postProcessor.py
+++ b/sickbeard/postProcessor.py
@@ -19,6 +19,7 @@
from __future__ import with_statement
import glob
+import fnmatch
import os
import re
import subprocess
@@ -154,13 +155,19 @@ class PostProcessor(object):
Returns: A list containing all files which are associated to the given file
"""
+ def recursive_glob(treeroot, pattern):
+ results = []
+ for base, dirs, files in os.walk(treeroot):
+ goodfiles = fnmatch.filter(files, pattern)
+ results.extend(os.path.join(base, f) for f in goodfiles)
+ return results
if not file_path:
return []
file_path_list = []
- base_name = file_path.rpartition('.')[0]
+ base_name = ek.ek(os.path.basename, file_path).rpartition('.')[0]
if not base_name_only:
base_name = base_name + '.'
@@ -171,8 +178,8 @@ class PostProcessor(object):
# don't confuse glob with chars we didn't mean to use
base_name = re.sub(r'[\[\]\*\?]', r'[\g<0>]', base_name)
-
- for associated_file_path in ek.ek(glob.glob, base_name + '*'):
+
+ for associated_file_path in ek.ek(recursive_glob, self.folder_path, base_name + '*'):
# only add associated to list
if associated_file_path == file_path:
continue
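glob.glob only matches within a single directory, while recursive_glob walks the whole tree with os.walk and filters each directory's file list through fnmatch, so associated files in subfolders (a Subs/ directory, say) are found as well. The helper in isolation:

    import fnmatch
    import os

    def recursive_glob(treeroot, pattern):
        results = []
        for base, dirs, files in os.walk(treeroot):
            goodfiles = fnmatch.filter(files, pattern)
            results.extend(os.path.join(base, f) for f in goodfiles)
        return results

    # e.g. recursive_glob('/tv/Show/Season 01', 'Episode.01*') would pick up
    # both 'Episode.01.mkv' and 'Subs/Episode.01.en.srt' (hypothetical paths)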
diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py
index ded1e8ca..a91e8721 100644
--- a/sickbeard/processTV.py
+++ b/sickbeard/processTV.py
@@ -251,7 +251,7 @@ def validateDir(path, dirName, nzbNameOriginal, failed, result):
process_failed(os.path.join(path, dirName), nzbNameOriginal, result)
return False
- if helpers.is_hidden_folder(dirName):
+ if helpers.is_hidden_folder(os.path.join(path, dirName)):
result.output += logHelper(u"Ignoring hidden folder: " + dirName, logger.DEBUG)
return False
diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py
index d97c4aa4..d8af8187 100755
--- a/sickbeard/providers/__init__.py
+++ b/sickbeard/providers/__init__.py
@@ -20,6 +20,7 @@ __all__ = ['ezrss',
'womble',
'btn',
'thepiratebay',
+ 'oldpiratebay',
'kat',
'torrentleech',
'scc',
@@ -39,6 +40,9 @@ __all__ = ['ezrss',
'bitsoup',
't411',
'tokyotoshokan',
+ 'alpharatio',
+ 'shazbat',
+ 'rarbg',
'tntvillage',
]
diff --git a/sickbeard/providers/alpharatio.py b/sickbeard/providers/alpharatio.py
new file mode 100755
index 00000000..a4f5c993
--- /dev/null
+++ b/sickbeard/providers/alpharatio.py
@@ -0,0 +1,280 @@
+# Author: Bill Nasty
+# URL: https://github.com/SiCKRAGETV/SickRage
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+import datetime
+import urlparse
+
+import sickbeard
+import generic
+
+from sickbeard.common import Quality, cpu_presets
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import db
+from sickbeard import classes
+from sickbeard import helpers
+from sickbeard import show_name_helpers
+from sickbeard.common import Overview
+from sickbeard.exceptions import ex
+from sickbeard import clients
+from lib import requests
+from lib.requests import exceptions
+from sickbeard.bs4_parser import BS4Parser
+from lib.unidecode import unidecode
+from sickbeard.helpers import sanitizeSceneName
+
+class AlphaRatioProvider(generic.TorrentProvider):
+
+ def __init__(self):
+
+ generic.TorrentProvider.__init__(self, "AlphaRatio")
+
+ self.supportsBacklog = True
+
+ self.enabled = False
+ self.username = None
+ self.password = None
+ self.ratio = None
+ self.minseed = None
+ self.minleech = None
+
+ self.cache = AlphaRatioCache(self)
+
+ self.urls = {'base_url': 'https://alpharatio.cc/',
+ 'login': 'https://alpharatio.cc/login.php',
+ 'detail': 'https://alpharatio.cc/torrents.php?torrentid=%s',
+ 'search': 'https://alpharatio.cc/torrents.php?searchstr=%s%s',
+ 'download': 'https://alpharatio.cc/%s',
+ }
+
+ self.categories = "&filter_cat[1]=1&filter_cat[2]=1&filter_cat[3]=1&filter_cat[4]=1&filter_cat[5]=1"
+
+ self.url = self.urls['base_url']
+
+ def isEnabled(self):
+ return self.enabled
+
+ def imageName(self):
+ return 'alpharatio.png'
+
+ def getQuality(self, item, anime=False):
+
+ quality = Quality.sceneQuality(item[0], anime)
+ return quality
+
+ def _doLogin(self):
+ login_params = {'username': self.username,
+ 'password': self.password,
+ 'remember_me': 'on',
+ 'login': 'submit',
+ }
+
+ self.session = requests.Session()
+
+ try:
+ response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
+ except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+ logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
+ return False
+
+ if re.search('Invalid Username/password', response.text) \
+ or re.search('Login :: AlphaRatio.cc ', response.text) \
+ or response.status_code == 401:
+ logger.log(u'Invalid username or password for ' + self.name + '. Check your settings', logger.ERROR)
+ return False
+
+ return True
+
+ def _get_season_search_strings(self, ep_obj):
+
+ search_string = {'Season': []}
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
+ elif ep_obj.show.anime:
+ ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
+ else:
+ ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) #1) showName SXX
+
+ search_string['Season'].append(ep_string)
+
+ return [search_string]
+
+ def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+ search_string = {'Episode': []}
+
+ if not ep_obj:
+ return []
+
+ if self.show.air_by_date:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ str(ep_obj.airdate).replace('-', '|')
+ search_string['Episode'].append(ep_string)
+ elif self.show.sports:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ str(ep_obj.airdate).replace('-', '|') + '|' + \
+ ep_obj.airdate.strftime('%b')
+ search_string['Episode'].append(ep_string)
+ elif self.show.anime:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ "%i" % int(ep_obj.scene_absolute_number)
+ search_string['Episode'].append(ep_string)
+ else:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = show_name_helpers.sanitizeSceneName(show_name) + ' ' + \
+ sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+ 'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
+
+ search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
+
+ return [search_string]
+
+ def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+
+ results = []
+ items = {'Season': [], 'Episode': [], 'RSS': []}
+
+ if not self._doLogin():
+ return results
+
+ for mode in search_params.keys():
+ for search_string in search_params[mode]:
+
+ if isinstance(search_string, unicode):
+ search_string = unidecode(search_string)
+
+ searchURL = self.urls['search'] % (search_string, self.categories)
+
+ data = self.getURL(searchURL)
+ if not data:
+ continue
+
+ try:
+ with BS4Parser(data, features=["html5lib", "permissive"]) as html:
+ torrent_table = html.find('table', attrs={'id': 'torrent_table'})
+ torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+
+ # continue only if at least one release was found
+ if len(torrent_rows) < 2:
+ logger.log(u"The data returned from " + self.name + " does not contain any torrents",
+ logger.DEBUG)
+ continue
+
+ for result in torrent_rows[1:]:
+ cells = result.find_all('td')
+
+ link = cells[1].find('a', attrs={'title': 'Download'})
+ full_id = link['href'].replace('torrents.php?action=download&id=', '')
+ torrent_id = full_id.split("&")[0]
+
+ try:
+ title = cells[1].find('a', attrs={'title': 'View Auto Torrent'}).contents[0]
+ download_url = self.urls['download'] % (link['href'])
+ id = torrent_id
+ seeders = cells[6].contents[0]
+ leechers = cells[7].contents[0]
+ except (AttributeError, TypeError):
+ continue
+
+ #Filter unseeded torrent
+ if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
+ continue
+
+ if not title or not download_url:
+ continue
+
+ item = title, download_url, id, seeders, leechers
+ logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
+
+ items[mode].append(item)
+
+ except Exception, e:
+ logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+
+ #For each search mode sort all the items by seeders
+ items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
+ results += items[mode]
+
+ return results
+
+ def _get_title_and_url(self, item):
+
+ title, url, id, seeders, leechers = item
+
+ if title:
+ title = u'' + title
+ title = title.replace(' ', '.')
+
+ if url:
+ url = str(url).replace('&amp;', '&')
+
+ return (title, url)
+
+ def findPropers(self, search_date=datetime.datetime.today()):
+
+ results = []
+
+ myDB = db.DBConnection()
+ sqlResults = myDB.select(
+ 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+ ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+ ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+ ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
+ ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
+ )
+
+ if not sqlResults:
+ return []
+
+ for sqlshow in sqlResults:
+ self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+ if self.show:
+ curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+
+ searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
+
+ for item in self._doSearch(searchString[0]):
+ title, url = self._get_title_and_url(item)
+ results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
+
+ return results
+
+ def seedRatio(self):
+ return self.ratio
+
+class AlphaRatioCache(tvcache.TVCache):
+
+ def __init__(self, provider):
+
+ tvcache.TVCache.__init__(self, provider)
+
+ # only poll AlphaRatio every 20 minutes max
+ self.minTime = 20
+
+ def _getRSSData(self):
+ search_params = {'RSS': ['']}
+ return {'entries': self.provider._doSearch(search_params)}
+
+provider = AlphaRatioProvider()
diff --git a/sickbeard/providers/freshontv.py b/sickbeard/providers/freshontv.py
index e06e9bad..715c8b53 100755
--- a/sickbeard/providers/freshontv.py
+++ b/sickbeard/providers/freshontv.py
@@ -111,6 +111,10 @@ class FreshOnTVProvider(generic.TorrentProvider):
if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response.text):
logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
return False
+
+ if re.search('DDoS protection by CloudFlare', response.text):
+ logger.log(u'Unable to login to ' + self.name + ' due to CloudFlare DDoS javascript check.', logger.ERROR)
+ return False
try:
if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
diff --git a/sickbeard/providers/iptorrents.py b/sickbeard/providers/iptorrents.py
index bc45600b..0d2d0d6b 100644
--- a/sickbeard/providers/iptorrents.py
+++ b/sickbeard/providers/iptorrents.py
@@ -54,9 +54,9 @@ class IPTorrentsProvider(generic.TorrentProvider):
self.cache = IPTorrentsCache(self)
- self.urls = {'base_url': 'https://www.iptorrents.eu',
- 'login': 'https://www.iptorrents.eu/torrents/',
- 'search': 'https://www.iptorrents.eu/torrents/?%s%s&q=%s&qf=ti',
+ self.urls = {'base_url': 'https://iptorrents.eu',
+ 'login': 'https://iptorrents.eu/torrents/',
+ 'search': 'https://iptorrents.eu/torrents/?%s%s&q=%s&qf=ti',
}
self.url = self.urls['base_url']
diff --git a/sickbeard/providers/kat.py b/sickbeard/providers/kat.py
index d9692a85..7e3091a8 100644
--- a/sickbeard/providers/kat.py
+++ b/sickbeard/providers/kat.py
@@ -55,7 +55,7 @@ class KATProvider(generic.TorrentProvider):
self.cache = KATCache(self)
- self.urls = {'base_url': 'http://kickass.so/'}
+ self.urls = {'base_url': 'http://kickass.to/'}
self.url = self.urls['base_url']
@@ -239,7 +239,7 @@ class KATProvider(generic.TorrentProvider):
id = item['guid']
title = item['title']
url = item['torrent_magneturi']
- verified = bool(item['torrent_verified'] or 0)
+ verified = bool(int(item['torrent_verified']) or 0)
seeders = int(item['torrent_seeds'])
leechers = int(item['torrent_peers'])
size = int(item['torrent_contentlength'])
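The feed delivers torrent_verified as a string, and any non-empty string is truthy in Python, so every result looked verified; converting to int first restores the intended check:

    print bool('0')       # True  -- the old bug: non-empty string
    print bool(int('0'))  # False
    print bool(int('1'))  # True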
diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py
index da02d329..15a06bbb 100755
--- a/sickbeard/providers/newznab.py
+++ b/sickbeard/providers/newznab.py
@@ -360,16 +360,16 @@ class NewznabProvider(generic.NZBProvider):
try:
result_date = datetime.datetime(*item['published_parsed'][0:6])
- except AttributeError:
+ except (AttributeError, KeyError):
try:
result_date = datetime.datetime(*item['updated_parsed'][0:6])
- except AttributeError:
+ except (AttributeError, KeyError):
try:
result_date = datetime.datetime(*item['created_parsed'][0:6])
- except AttributeError:
+ except (AttributeError, KeyError):
try:
result_date = datetime.datetime(*item['date'][0:6])
- except AttributeError:
+ except (AttributeError, KeyError):
logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
continue
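feedparser entries behave like dictionaries, so a missing field raises KeyError rather than AttributeError; widening the except clauses keeps the fallback chain moving instead of crashing. The same cascade could also be written as a loop over candidate keys; a hedged sketch, not the patch's code:

    import datetime

    def parse_entry_date(item):
        for key in ('published_parsed', 'updated_parsed', 'created_parsed', 'date'):
            try:
                return datetime.datetime(*item[key][0:6])
            except (AttributeError, KeyError, TypeError):
                continue
        return None  # caller logs the entry and skips it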
diff --git a/sickbeard/providers/nyaatorrents.py b/sickbeard/providers/nyaatorrents.py
index 09fd7b17..46468628 100644
--- a/sickbeard/providers/nyaatorrents.py
+++ b/sickbeard/providers/nyaatorrents.py
@@ -61,7 +61,7 @@ class NyaaProvider(generic.TorrentProvider):
return generic.TorrentProvider.findSearchResults(self, show, episodes, search_mode, manualSearch)
def _get_season_search_strings(self, ep_obj):
- return show_name_helpers.makeSceneShowSearchStrings(self.show)
+ return show_name_helpers.makeSceneShowSearchStrings(self.show, anime=True)
def _get_episode_search_strings(self, ep_obj, add_string=''):
return self._get_season_search_strings(ep_obj)
diff --git a/sickbeard/providers/oldpiratebay.py b/sickbeard/providers/oldpiratebay.py
new file mode 100644
index 00000000..fb0a6503
--- /dev/null
+++ b/sickbeard/providers/oldpiratebay.py
@@ -0,0 +1,340 @@
+# Author: Mr_Orange
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import with_statement
+
+import time
+import re
+import urllib, urllib2, urlparse
+import sys
+import os
+import datetime
+
+import sickbeard
+import generic
+from sickbeard.common import Quality, cpu_presets
+from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
+from sickbeard import db
+from sickbeard import classes
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import helpers
+from sickbeard import clients
+from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName
+from sickbeard.common import Overview
+from sickbeard.exceptions import ex
+from sickbeard import encodingKludge as ek
+from lib import requests
+from lib.requests import exceptions
+from lib.unidecode import unidecode
+
+
+class OldPirateBayProvider(generic.TorrentProvider):
+ def __init__(self):
+
+ generic.TorrentProvider.__init__(self, "OldPirateBay")
+
+ self.supportsBacklog = True
+
+ self.enabled = False
+ self.ratio = None
+ self.confirmed = False
+ self.minseed = None
+ self.minleech = None
+
+ self.cache = OldPirateBayCache(self)
+
+ self.urls = {'base_url': 'https://oldpiratebay.org/'}
+
+ self.url = self.urls['base_url']
+
+ self.searchurl = self.url + 'search.php?q=%s&Torrent_sort=seeders.desc' # order by seed
+
+ self.re_title_url = '/torrent/(?P<id>\d+)/(?P<title>.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+).+?(?P<leechers>\d+)'
+
+ def isEnabled(self):
+ return self.enabled
+
+ def imageName(self):
+ return 'oldpiratebay.png'
+
+ def getQuality(self, item, anime=False):
+
+ quality = Quality.sceneQuality(item[0], anime)
+ return quality
+
+ def _reverseQuality(self, quality):
+
+ quality_string = ''
+
+ if quality == Quality.SDTV:
+ quality_string = 'HDTV x264'
+ if quality == Quality.SDDVD:
+ quality_string = 'DVDRIP'
+ elif quality == Quality.HDTV:
+ quality_string = '720p HDTV x264'
+ elif quality == Quality.FULLHDTV:
+ quality_string = '1080p HDTV x264'
+ elif quality == Quality.RAWHDTV:
+ quality_string = '1080i HDTV mpeg2'
+ elif quality == Quality.HDWEBDL:
+ quality_string = '720p WEB-DL h264'
+ elif quality == Quality.FULLHDWEBDL:
+ quality_string = '1080p WEB-DL h264'
+ elif quality == Quality.HDBLURAY:
+ quality_string = '720p Bluray x264'
+ elif quality == Quality.FULLHDBLURAY:
+ quality_string = '1080p Bluray x264'
+
+ return quality_string
+
+ def _find_season_quality(self, title, torrent_id, ep_number):
+ """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """
+
+ mediaExtensions = ['avi', 'mkv', 'wmv', 'divx',
+ 'vob', 'dvr-ms', 'wtv', 'ts',
+ 'ogv', 'rar', 'zip', 'mp4']
+
+ quality = Quality.UNKNOWN
+
+ fileName = None
+
+ fileURL = self.url + 'ajax_details_filelist.php?id=' + str(torrent_id)
+ data = self.getURL(fileURL)
+ if not data:
+ return None
+
+ filesList = re.findall('<td align="left">(.*?)</td>', data)
+
+ if not filesList:
+ logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR)
+
+ videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList)
+
+ #Filtering SingleEpisode/MultiSeason Torrent
+ if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1):
+ logger.log(
+ u"Result " + title + " has " + str(ep_number) + " episodes; the torrent contains " + str(
+ len(videoFiles)) + " video files", logger.DEBUG)
+ logger.log(u"Result " + title + " seems to be a single-episode or multi-season torrent, skipping result...",
+ logger.DEBUG)
+ return None
+
+ if Quality.sceneQuality(title) != Quality.UNKNOWN:
+ return title
+
+ for fileName in videoFiles:
+ quality = Quality.sceneQuality(os.path.basename(fileName))
+ if quality != Quality.UNKNOWN: break
+
+ if fileName is not None and quality == Quality.UNKNOWN:
+ quality = Quality.assumeQuality(os.path.basename(fileName))
+
+ if quality == Quality.UNKNOWN:
+ logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG)
+ return None
+
+ try:
+ myParser = NameParser(showObj=self.show)
+ parse_result = myParser.parse(fileName)
+ except (InvalidNameException, InvalidShowException):
+ return None
+
+ logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG)
+
+ if parse_result.series_name and parse_result.season_number:
+ title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality(
+ quality)
+
+ return title
+
+ def _get_season_search_strings(self, ep_obj):
+
+ search_string = {'Season': []}
+ for show_name in set(allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
+ search_string['Season'].append(ep_string)
+ ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0]
+ search_string['Season'].append(ep_string)
+ elif ep_obj.show.anime:
+ ep_string = show_name + ' ' + "%02d" % ep_obj.scene_absolute_number
+ search_string['Season'].append(ep_string)
+ else:
+ ep_string = show_name + ' S%02d' % int(ep_obj.scene_season)
+ search_string['Season'].append(ep_string)
+ ep_string = show_name + ' Season ' + str(ep_obj.scene_season) + ' -Ep*'
+ search_string['Season'].append(ep_string)
+
+ return [search_string]
+
+ def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+ search_string = {'Episode': []}
+
+ if self.show.air_by_date:
+ for show_name in set(allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ str(ep_obj.airdate).replace('-', ' ')
+ search_string['Episode'].append(ep_string)
+ elif self.show.sports:
+ for show_name in set(allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ str(ep_obj.airdate).replace('-', '|') + '|' + \
+ ep_obj.airdate.strftime('%b')
+ search_string['Episode'].append(ep_string)
+ elif self.show.anime:
+ for show_name in set(allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ "%02i" % int(ep_obj.scene_absolute_number)
+ search_string['Episode'].append(ep_string)
+ else:
+ for show_name in set(allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + ' ' + \
+ sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+ 'episodenumber': ep_obj.scene_episode} + '|' + \
+ sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.scene_season,
+ 'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
+ search_string['Episode'].append(re.sub('\s+', ' ', ep_string))
+
+ return [search_string]
+
+ def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+
+ results = []
+ items = {'Season': [], 'Episode': [], 'RSS': []}
+
+ for mode in search_params.keys():
+ for search_string in search_params[mode]:
+ if isinstance(search_string, unicode):
+ search_string = unidecode(search_string)
+
+ if mode != 'RSS':
+ searchURL = self.searchurl % (urllib.quote(search_string))
+ else:
+ searchURL = self.url + 'tv/latest/'
+
+ logger.log(u"Search string: " + searchURL, logger.DEBUG)
+
+ data = self.getURL(searchURL)
+ if not data:
+ continue
+
+ re_title_url = self.proxy._buildRE(self.re_title_url)
+
+ match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data))
+
+ for torrent in match:
+ title = torrent.group('title').replace('_',
+ '.')  # not sure why, but SickBeard skips releases with '_' in the name
+ url = torrent.group('url')
+ logger.log(u"Torrent url: " + url, logger.DEBUG)
+ id = int(torrent.group('id'))
+ seeders = int(torrent.group('seeders'))
+ leechers = int(torrent.group('leechers'))
+
+ #Filter unseeded torrent
+ if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
+ continue
+
+ #Accept Torrent only from Good People for every Episode Search
+ if self.confirmed and re.search('(VIP|Trusted|Helper|Moderator)', torrent.group(0)) is None:
+ logger.log(u"OldPirateBay Provider found result " + torrent.group(
+ 'title') + " but that doesn't seem like a trusted result so I'm ignoring it", logger.DEBUG)
+ continue
+
+ # for full-season torrents: check that the video file count matches the episode count and find the real quality by inspecting the files in the torrent
+ if mode == 'Season' and search_mode == 'sponly':
+ ep_number = int(epcount / len(set(allPossibleShowNames(self.show))))
+ title = self._find_season_quality(title, id, ep_number)
+
+ if not title or not url:
+ continue
+
+ item = title, url, id, seeders, leechers
+
+ items[mode].append(item)
+
+ #For each search mode sort all the items by seeders
+ items[mode].sort(key=lambda tup: tup[3], reverse=True)
+
+ results += items[mode]
+
+ return results
+
+ def _get_title_and_url(self, item):
+
+ title, url, id, seeders, leechers = item
+
+ if title:
+ title = u'' + title.replace(' ', '.')
+
+ if url:
+ url = url.replace('&amp;', '&')
+
+ return (title, url)
+
+ def findPropers(self, search_date=datetime.datetime.today()):
+
+ results = []
+
+ myDB = db.DBConnection()
+ sqlResults = myDB.select(
+ 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+ ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+ ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+ ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
+ ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
+ )
+
+ if not sqlResults:
+ return []
+
+ for sqlshow in sqlResults:
+ self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+
+ if self.show:
+ curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+
+ searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
+
+ for item in self._doSearch(searchString[0]):
+ title, url = self._get_title_and_url(item)
+ results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
+
+ return results
+
+ def seedRatio(self):
+ return self.ratio
+
+
+class OldPirateBayCache(tvcache.TVCache):
+ def __init__(self, provider):
+
+ tvcache.TVCache.__init__(self, provider)
+
+ # only poll OldPirateBay every 20 minutes max
+ self.minTime = 20
+
+ def _getRSSData(self):
+ search_params = {'RSS': ['rss']}
+ return {'entries': self.provider._doSearch(search_params)}
+
+provider = OldPirateBayProvider()
diff --git a/sickbeard/providers/rarbg.py b/sickbeard/providers/rarbg.py
new file mode 100644
index 00000000..d6f47e73
--- /dev/null
+++ b/sickbeard/providers/rarbg.py
@@ -0,0 +1,281 @@
+# -*- coding: latin-1 -*-
+# Author: djoole
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of Sick Beard.
+#
+# Sick Beard is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Sick Beard is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
+
+import traceback
+import time
+import re
+import datetime
+import sickbeard
+import generic
+import cookielib
+import urllib
+import urllib2
+
+from lib import requests
+from lib.requests import exceptions
+
+from sickbeard.common import USER_AGENT, Quality, cpu_presets
+from sickbeard import logger
+from sickbeard import tvcache
+from sickbeard import show_name_helpers
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard import db
+from sickbeard import helpers
+from sickbeard import classes
+from sickbeard.helpers import sanitizeSceneName
+from sickbeard.exceptions import ex
+
+
+class RarbgProvider(generic.TorrentProvider):
+
+ def __init__(self):
+ generic.TorrentProvider.__init__(self, "Rarbg")
+
+ self.supportsBacklog = True
+ self.enabled = False
+
+ self.cache = RarbgCache(self)
+
+ self.ratio = None
+
+ self.cookies = cookielib.CookieJar()
+ self.cookie = cookielib.Cookie(version=0, name='7fAY799j', value='VtdTzG69', port=None, port_specified=False, domain='rarbg.com', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
+ self.cookies.set_cookie(self.cookie)
+ self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookies))
+ self.opener.addheaders=[('User-agent', 'Mozilla/5.0')]
+
+ self.urls = {'base_url': 'https://rarbg.com/torrents.php',
+ 'search': 'https://rarbg.com/torrents.php?search=%s&category[]=%s',
+ 'download': 'https://rarbg.com/download.php?id=%s&f=%s',
+ }
+
+ self.url = self.urls['base_url']
+
+ self.subcategories = [41] #18
+
+
+ def getURL(self, url, post_data=None, params=None, timeout=30, json=False):
+ logger.log(u"Rarbg downloading url :" + url, logger.DEBUG)
+ request = urllib2.Request(url)
+ content = self.opener.open(request)
+ return content.read()
+
+
+ def isEnabled(self):
+ return self.enabled
+
+ def imageName(self):
+ return 'rarbg.png'
+
+ def getQuality(self, item, anime=False):
+ quality = Quality.sceneQuality(item[0], anime)
+ return quality
+
+# def _doLogin(self):
+# login_params = {'login': self.username,
+# 'password': self.password,
+# }
+#
+# self.session = requests.Session()
+#
+# try:
+# response = self.session.post(self.urls['login_page'], data=login_params, timeout=30, verify=False)
+# response = self.session.get(self.urls['base_url'], timeout=30, verify=False)
+# except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
+# logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
+# return False
+#
+# if not re.search('/users/logout/', response.text.lower()):
+# logger.log(u'Invalid username or password for ' + self.name + ' Check your settings', logger.ERROR)
+# return False
+#
+# return True
+
+ def _get_season_search_strings(self, ep_obj):
+
+ search_string = {'Season': []}
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ if ep_obj.show.air_by_date or ep_obj.show.sports:
+ ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
+ elif ep_obj.show.anime:
+ ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
+ else:
+ ep_string = show_name + '.S%02d' % int(ep_obj.scene_season) #1) showName.SXX
+
+ search_string['Season'].append(ep_string)
+
+ return [search_string]
+
+ def _get_episode_search_strings(self, ep_obj, add_string=''):
+
+ search_string = {'Episode': []}
+
+ if not ep_obj:
+ return []
+
+ if self.show.air_by_date:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + '.' + \
+ str(ep_obj.airdate).replace('-', '|')
+ search_string['Episode'].append(ep_string)
+ elif self.show.sports:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + '.' + \
+ str(ep_obj.airdate).replace('-', '|') + '|' + \
+ ep_obj.airdate.strftime('%b')
+ search_string['Episode'].append(ep_string)
+ elif self.show.anime:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = sanitizeSceneName(show_name) + '.' + \
+ "%i" % int(ep_obj.scene_absolute_number)
+ search_string['Episode'].append(ep_string)
+ else:
+ for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
+ ep_string = show_name_helpers.sanitizeSceneName(show_name) + '.' + \
+ sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
+ 'episodenumber': ep_obj.scene_episode} + ' %s' % add_string
+
+ search_string['Episode'].append(re.sub('\s+', '.', ep_string))
+
+ return [search_string]
+
+ def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0):
+
+ results = []
+ items = {'Season': [], 'Episode': [], 'RSS': []}
+
+ # Get cookie
+ #dummy = self.getURL(self.url)
+
+# if not self._doLogin():
+# return results
+
+ for mode in search_params.keys():
+
+ for search_string in search_params[mode]:
+
+ for sc in self.subcategories:
+ searchURL = self.urls['search'] % (urllib.quote(search_string), sc)
+ logger.log(u"" + self.name + " search page URL: " + searchURL, logger.DEBUG)
+
+ data = self.getURL(searchURL)
+ if not data:
+ continue
+
+ try:
+ with BS4Parser(data, features=["html5lib", "permissive"]) as html:
+ resultsTable = html.find('table', attrs={'class': 'lista2t'})
+
+ if not resultsTable:
+ logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+ logger.DEBUG)
+ continue
+
+ entries = resultsTable.find("tbody").findAll("tr")
+
+ if len(entries) > 0:
+ for result in entries:
+
+ try:
+ link = result.find('a', title=True)
+ torrentName = link['title']
+ torrent_name = str(torrentName)
+ torrentId = result.find_all('td')[1].find_all('a')[0]['href'][1:].replace(
+ 'torrent/', '')
+ torrent_download_url = (self.urls['download'] % (torrentId, torrent_name + '-[rarbg.com].torrent')).encode('utf8')
+ except (AttributeError, TypeError):
+ continue
+
+ if not torrent_name or not torrent_download_url:
+ continue
+
+ item = torrent_name, torrent_download_url
+ logger.log(u"Found result: " + torrent_name + " (" + torrent_download_url + ")",
+ logger.DEBUG)
+ items[mode].append(item)
+
+ else:
+ logger.log(u"The Data returned from " + self.name + " do not contains any torrent",
+ logger.WARNING)
+ continue
+
+ except Exception, e:
+ logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(),
+ logger.ERROR)
+ results += items[mode]
+
+ return results
+
+ def _get_title_and_url(self, item):
+
+ title, url = item
+
+ if title:
+ title = u'' + title
+ title = title.replace(' ', '.')
+
+ if url:
+ url = str(url).replace('&amp;', '&')
+
+ return title, url
+
+ def findPropers(self, search_date=datetime.datetime.today()):
+
+ results = []
+
+ myDB = db.DBConnection()
+ sqlResults = myDB.select(
+ 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
+ ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
+ ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
+ ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
+ ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))'
+ )
+
+ if not sqlResults:
+ return []
+
+ for sqlshow in sqlResults:
+ self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+ if self.show:
+ curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+ searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
+
+ for item in self._doSearch(searchString[0]):
+ title, url = self._get_title_and_url(item)
+ results.append(classes.Proper(title, url, datetime.datetime.today(), self.show))
+
+ return results
+
+ def seedRatio(self):
+ return self.ratio
+
+class RarbgCache(tvcache.TVCache):
+ def __init__(self, provider):
+ tvcache.TVCache.__init__(self, provider)
+
+ # Only poll Rarbg every 30 minutes max
+ self.minTime = 30
+
+ def _getRSSData(self):
+ search_params = {'RSS': ['']}
+ return {'entries': self.provider._doSearch(search_params)}
+
+
+provider = RarbgProvider()
diff --git a/sickbeard/providers/shazbat.py b/sickbeard/providers/shazbat.py
new file mode 100644
index 00000000..d723a838
--- /dev/null
+++ b/sickbeard/providers/shazbat.py
@@ -0,0 +1,93 @@
+# Author: Nic Wolfe
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
+try:
+ import xml.etree.cElementTree as etree
+except ImportError:
+ import elementtree.ElementTree as etree
+
+import sickbeard
+import generic
+
+from sickbeard.exceptions import ex, AuthException
+from sickbeard import helpers
+from sickbeard import logger
+from sickbeard import tvcache
+
+
+class ShazbatProvider(generic.TorrentProvider):
+ def __init__(self):
+
+ generic.TorrentProvider.__init__(self, "Shazbat.tv")
+
+ self.supportsBacklog = False
+
+ self.enabled = False
+ self.passkey = None
+ self.ratio = None
+ self.options = None
+
+ self.cache = ShazbatCache(self)
+
+ self.urls = {'base_url': 'http://www.shazbat.tv/'}
+ self.url = self.urls['base_url']
+
+ def isEnabled(self):
+ return self.enabled
+
+ def imageName(self):
+ return 'shazbat.png'
+
+ def _checkAuth(self):
+ if not self.passkey:
+ raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+
+ return True
+
+ def _checkAuthFromData(self, data):
+ if not self.passkey:
+ self._checkAuth()
+ elif not (data['entries'] and data['feed']):
+ logger.log(u"Incorrect authentication credentials for " + self.name, logger.DEBUG)
+ raise AuthException(
+ u"Your authentication credentials for " + self.name + " are incorrect, check your config")
+
+ return True
+
+ def seedRatio(self):
+ return self.ratio
+
+
+class ShazbatCache(tvcache.TVCache):
+ def __init__(self, provider):
+ tvcache.TVCache.__init__(self, provider)
+
+ # only poll Shazbat feed every 15 minutes max
+ self.minTime = 15
+
+ def _getRSSData(self):
+
+ rss_url = self.provider.url + 'rss/recent?passkey=' + self.provider.passkey + '&fname=true'
+ logger.log(self.provider.name + u" cache update URL: " + rss_url, logger.DEBUG)
+
+ return self.getRSSFeed(rss_url, items=['entries', 'feed'])
+
+ def _checkAuth(self, data):
+ return self.provider._checkAuthFromData(data)
+
+provider = ShazbatProvider()
diff --git a/sickbeard/providers/t411.py b/sickbeard/providers/t411.py
index a98e25bb..f555846f 100644
--- a/sickbeard/providers/t411.py
+++ b/sickbeard/providers/t411.py
@@ -59,7 +59,7 @@ class T411Provider(generic.TorrentProvider):
self.url = self.urls['base_url']
- self.subcategories = [637, 455, 433]
+ self.subcategories = [433, 637, 455]
def isEnabled(self):
return self.enabled
diff --git a/sickbeard/providers/thepiratebay.py b/sickbeard/providers/thepiratebay.py
index 1d5df98b..1c1e285e 100644
--- a/sickbeard/providers/thepiratebay.py
+++ b/sickbeard/providers/thepiratebay.py
@@ -59,11 +59,11 @@ class ThePirateBayProvider(generic.TorrentProvider):
self.cache = ThePirateBayCache(self)
- self.urls = {'base_url': 'https://oldpiratebay.org/'}
+ self.urls = {'base_url': 'https://thepiratebay.se/'}
self.url = self.urls['base_url']
- self.searchurl = self.url + 'search.php?q=%s&Torrent_sort=seeders.desc' # order by seed
+ self.searchurl = self.url + 'search/%s/0/7/200' # order by seed
self.re_title_url = '/torrent/(?P<id>\d+)/(?P<title>.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+).+?(?P<leechers>\d+)'
@@ -238,17 +238,14 @@ class ThePirateBayProvider(generic.TorrentProvider):
continue
re_title_url = self.proxy._buildRE(self.re_title_url)
- match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data))
-
- for torrent in match:
-
+ matches = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data))
+ for torrent in matches:
title = torrent.group('title').replace('_',
'.') #Do not know why but SickBeard skip release with '_' in name
url = torrent.group('url')
id = int(torrent.group('id'))
seeders = int(torrent.group('seeders'))
leechers = int(torrent.group('leechers'))
-
#Filter unseeded torrent
if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
continue
diff --git a/sickbeard/providers/torrentbytes.py b/sickbeard/providers/torrentbytes.py
index 71d69425..e62a91fb 100644
--- a/sickbeard/providers/torrentbytes.py
+++ b/sickbeard/providers/torrentbytes.py
@@ -81,7 +81,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
login_params = {'username': self.username,
'password': self.password,
- 'login': 'submit'
+ 'login': 'Log in!'
}
self.session = requests.Session()
diff --git a/sickbeard/providers/torrentday.py b/sickbeard/providers/torrentday.py
index 9b6738d3..e74d77ba 100644
--- a/sickbeard/providers/torrentday.py
+++ b/sickbeard/providers/torrentday.py
@@ -54,10 +54,10 @@ class TorrentDayProvider(generic.TorrentProvider):
self.cache = TorrentDayCache(self)
- self.urls = {'base_url': 'http://www.td.af',
- 'login': 'http://www.td.af/torrents/',
- 'search': 'http://www.td.af/V3/API/API.php',
- 'download': 'http://www.td.af/download.php/%s/%s'
+ self.urls = {'base_url': 'https://torrentday.eu',
+ 'login': 'https://torrentday.eu/torrents/',
+ 'search': 'https://torrentday.eu/V3/API/API.php',
+ 'download': 'https://torrentday.eu/download.php/%s/%s'
}
self.url = self.urls['base_url']
diff --git a/sickbeard/sbdatetime.py b/sickbeard/sbdatetime.py
index a6d8c942..0e7ebf52 100644
--- a/sickbeard/sbdatetime.py
+++ b/sickbeard/sbdatetime.py
@@ -130,7 +130,7 @@ class sbdatetime(datetime.datetime):
try:
if sbdatetime.has_locale:
- locale.setlocale(locale.LC_TIME, 'us_US')
+ locale.setlocale(locale.LC_TIME, 'en_US')
except:
sbdatetime.has_locale = False
@@ -210,7 +210,7 @@ class sbdatetime(datetime.datetime):
strd = dt.strftime(sickbeard.DATE_PRESET)
try:
if sbdatetime.has_locale:
- locale.setlocale(locale.LC_TIME, 'us_US')
+ locale.setlocale(locale.LC_TIME, 'en_US')
except:
sbdatetime.has_locale = False
if t_preset is not None:
@@ -226,7 +226,7 @@ class sbdatetime(datetime.datetime):
strd = self.strftime(sickbeard.DATE_PRESET)
try:
if sbdatetime.has_locale:
- locale.setlocale(locale.LC_TIME, 'us_US')
+ locale.setlocale(locale.LC_TIME, 'en_US')
except:
sbdatetime.has_locale = False
if t_preset is not None:
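'us_US' is not a valid locale name, so every one of these calls raised locale.Error and the except branch permanently disabled has_locale; 'en_US' is the correct POSIX name (availability still depends on what the system has installed). A quick demonstration:

    import locale

    try:
        locale.setlocale(locale.LC_TIME, 'us_US')  # always fails: no such locale
    except locale.Error:
        print 'us_US rejected'

    try:
        locale.setlocale(locale.LC_TIME, 'en_US')
    except locale.Error:
        print 'en_US not installed on this system'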
diff --git a/sickbeard/search.py b/sickbeard/search.py
index 46803d3c..63a476bd 100644
--- a/sickbeard/search.py
+++ b/sickbeard/search.py
@@ -242,10 +242,11 @@ def pickBestResult(results, show, quality_list=None):
logger.INFO)
continue
- if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
- cur_result.provider.name):
- logger.log(cur_result.name + u" has previously failed, rejecting it")
- continue
+ if hasattr(cur_result, 'size'):
+ if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(cur_result.name, cur_result.size,
+ cur_result.provider.name):
+ logger.log(cur_result.name + u" has previously failed, rejecting it")
+ continue
if not bestResult or bestResult.quality < cur_result.quality and cur_result.quality != Quality.UNKNOWN:
bestResult = cur_result
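Some providers build results without a size attribute, so the failed-download lookup crashed with AttributeError; the hasattr guard simply skips the history check for those results. An equivalent guard using getattr, as a sketch rather than the patch's wording:

    import sickbeard
    from sickbeard import failed_history

    def previously_failed(cur_result):
        size = getattr(cur_result, 'size', None)  # some providers omit size
        if size is None or not sickbeard.USE_FAILED_DOWNLOADS:
            return False
        return failed_history.hasFailed(cur_result.name, size, cur_result.provider.name)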
diff --git a/sickbeard/show_name_helpers.py b/sickbeard/show_name_helpers.py
index 357649a0..3ffbf142 100644
--- a/sickbeard/show_name_helpers.py
+++ b/sickbeard/show_name_helpers.py
@@ -20,6 +20,7 @@ import os
import re
import datetime
+from functools import partial
import sickbeard
from sickbeard import common
@@ -114,11 +115,15 @@ def sceneToNormalShowNames(name):
return list(set(results))
-def makeSceneShowSearchStrings(show, season=-1):
+def makeSceneShowSearchStrings(show, season=-1, anime=False):
showNames = allPossibleShowNames(show, season=season)
# scenify the names
- return map(sanitizeSceneName, showNames)
+ if anime:
+ sanitizeSceneNameAnime = partial(sanitizeSceneName, anime=True)
+ return map(sanitizeSceneNameAnime, showNames)
+ else:
+ return map(sanitizeSceneName, showNames)
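functools.partial pre-binds the anime=True keyword so the result is still a one-argument callable, which is what map expects. In isolation, with a toy stand-in for sanitizeSceneName:

    from functools import partial

    def scenify(name, anime=False):
        return name if anime else name.replace("'", '')  # toy stand-in

    scenify_anime = partial(scenify, anime=True)
    print map(scenify_anime, [u"Kuroko's Basketball"])  # [u"Kuroko's Basketball"]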
def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
diff --git a/sickbeard/subtitles.py b/sickbeard/subtitles.py
index 70791212..dce61585 100644
--- a/sickbeard/subtitles.py
+++ b/sickbeard/subtitles.py
@@ -71,7 +71,7 @@ def subtitlesLanguages(video_path):
subtitles = video.scan()
languages = set()
for subtitle in subtitles:
- if subtitle.language:
+ if subtitle.language and subtitle.language.alpha2:
languages.add(subtitle.language.alpha2)
else:
languages.add(SINGLE)
diff --git a/sickbeard/traktChecker.py b/sickbeard/traktChecker.py
index e6cc8238..84c68b07 100644
--- a/sickbeard/traktChecker.py
+++ b/sickbeard/traktChecker.py
@@ -37,7 +37,7 @@ class TraktChecker():
def __init__(self):
self.todoWanted = []
- self.trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY)
+ self.trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)
def run(self, force=False):
try:
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index 56e3525e..5466712a 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1273,11 +1273,12 @@ class TVShow(object):
if epStatus == FAILED:
return Overview.WANTED
- if curQuality == Quality.UNKNOWN:
+ if epStatus == DOWNLOADED and curQuality == Quality.UNKNOWN:
return Overview.QUAL
- elif epStatus in (SNATCHED_BEST, SNATCHED, SNATCHED_PROPER ) and curQuality == maxBestQuality:
+ elif epStatus in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST):
+ if curQuality < maxBestQuality:
+ return Overview.QUAL
return Overview.SNATCHED
- # if they don't want re-downloads then we call it good if they have anything
elif maxBestQuality == None:
return Overview.GOOD
# if they have one but it's not the best they want then mark it as qual
@@ -1390,6 +1391,7 @@ class TVEpisode(object):
self.episode), logger.DEBUG)
previous_subtitles = self.subtitles
+ added_subtitles = []
try:
need_languages = set(sickbeard.SUBTITLES_LANGUAGES) - set(self.subtitles)
@@ -1407,19 +1409,25 @@ class TVEpisode(object):
helpers.chmodAsParent(subs_new_path)
for subtitle in subtitles.get(video):
+ added_subtitles.append(subtitle.language.alpha2)
new_file_path = ek.ek(os.path.join, subs_new_path, os.path.basename(subtitle.path))
helpers.moveFile(subtitle.path, new_file_path)
helpers.chmodAsParent(new_file_path)
else:
for video in subtitles:
for subtitle in subtitles.get(video):
+ added_subtitles.append(subtitle.language.alpha2)
helpers.chmodAsParent(subtitle.path)
except Exception as e:
logger.log("Error occurred when downloading subtitles: " + traceback.format_exc(), logger.ERROR)
return
- self.refreshSubtitles()
+ if sickbeard.SUBTITLES_MULTI:
+ self.refreshSubtitles()
+ else:
+ self.subtitles = added_subtitles
+
self.subtitles_searchcount = self.subtitles_searchcount + 1 if self.subtitles_searchcount else 1 # added the if because sometime it raise an error
self.subtitles_lastsearch = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
self.saveToDB()
@@ -1931,7 +1939,7 @@ class TVEpisode(object):
myDB = db.DBConnection()
rows = myDB.select(
- 'SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
+ 'SELECT episode_id, subtitles FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
[self.show.indexerid, self.season, self.episode])
epID = None
@@ -1940,16 +1948,30 @@ class TVEpisode(object):
if epID:
# use a custom update method to get the data into the DB for existing records.
- return [
- "UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, "
- "subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, "
- "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, "
- "absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?",
- [self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
- self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
- self.hastbn,
- self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
- self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]]
+ # multi-language subtitles enabled, or subtitles were added or removed
+ if sickbeard.SUBTITLES_MULTI or not rows[0]['subtitles'] or not self.subtitles:
+ return [
+ "UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, "
+ "subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, "
+ "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, "
+ "absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?",
+ [self.indexerid, self.indexer, self.name, self.description, ",".join([sub for sub in self.subtitles]),
+ self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
+ self.hastbn,
+ self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
+ self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]]
+ else:
+ # Don't update the subtitle language when the srt file doesn't contain the alpha2 code, keep value from subliminal
+ return [
+ "UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, "
+ "subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, "
+ "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, "
+ "absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?",
+ [self.indexerid, self.indexer, self.name, self.description,
+ self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo,
+ self.hastbn,
+ self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid,
+ self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]]
else:
# use a custom insert method to get the data into the DB.
return [
@@ -2132,7 +2154,27 @@ class TVEpisode(object):
show_name = re.sub("\(\d+\)$", "", self.show.name).rstrip()
else:
show_name = self.show.name
-
+
+ # try to get the release group
+ rel_grp = {}
+ rel_grp["SiCKRAGE"] = 'SiCKRAGE'
+ if hasattr(self, 'location'): # from the location name
+ rel_grp['location'] = release_group(self.show, self.location)
+ if rel_grp['location'] == '': del rel_grp['location']
+ if hasattr(self, '_release_group'): # from the release group field in db
+ rel_grp['database'] = self._release_group
+ if rel_grp['database'] == '': del rel_grp['database']
+ if hasattr(self, 'release_name'): # from the release name field in db
+ rel_grp['release_name'] = release_group(self.show, self.release_name)
+ if rel_grp['release_name'] == '': del rel_grp['release_name']
+
+ # use database, release_name, location in that order
+ if 'database' in rel_grp: relgrp = 'database'
+ elif 'release_name' in rel_grp: relgrp = 'release_name'
+ elif 'location' in rel_grp: relgrp = 'location'
+ else: relgrp = 'SiCKRAGE'
+
return {
'%SN': show_name,
'%S.N': dot(show_name),
@@ -2154,7 +2196,7 @@ class TVEpisode(object):
'%AB': '%(#)03d' % {'#': self.absolute_number},
'%XAB': '%(#)03d' % {'#': self.scene_absolute_number},
'%RN': release_name(self.release_name),
- '%RG': release_group(self.show, self.release_name),
+ '%RG': rel_grp[relgrp],
'%AD': str(self.airdate).replace('-', ' '),
'%A.D': str(self.airdate).replace('-', '.'),
'%A_D': us(str(self.airdate)),
@@ -2202,21 +2244,30 @@ class TVEpisode(object):
replace_map = self._replace_map()
result_name = pattern
-
- # if there's no release group then replace it with a reasonable facsimile
+
+ # if there's no release group in the db, let the user know we replaced it
+ if (not hasattr(self, '_release_group') or self._release_group == '') and replace_map['%RG'] != 'SiCKRAGE':
+ logger.log(u"Episode has no release group, replacing it with '" + replace_map['%RG'] + "'", logger.DEBUG)
+ self._release_group = replace_map['%RG'] # if release_group is not in the db, put it there
+
+ # if there's no release name then replace it with a reasonable facsimile
if not replace_map['%RN']:
- if self.show.air_by_date or self.show.sports:
- result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-SiCKRAGE')
- result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-sickrage')
- elif anime_type != 3:
- result_name = result_name.replace('%RN', '%S.N.%AB.%E.N-SiCKRAGE')
- result_name = result_name.replace('%rn', '%s.n.%ab.%e.n-sickrage')
- else:
- result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-SiCKRAGE')
- result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-sickrage')
- result_name = result_name.replace('%RG', 'SICKRAGE')
- result_name = result_name.replace('%rg', 'sickrage')
+ if self.show.air_by_date or self.show.sports:
+ result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-' + replace_map['%RG'])
+ result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-' + replace_map['%RG'].lower())
+
+ elif anime_type != 3:
+ result_name = result_name.replace('%RN', '%S.N.%AB.%E.N-' + replace_map['%RG'])
+ result_name = result_name.replace('%rn', '%s.n.%ab.%e.n-' + replace_map['%RG'].lower())
+
+ else:
+ result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-' + replace_map['%RG'])
+ result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-' + replace_map['%RG'].lower())
+
logger.log(u"Episode has no release name, replacing it with a generic one: " + result_name, logger.DEBUG)
if not replace_map['%RT']:
@@ -2437,7 +2488,7 @@ class TVEpisode(object):
return
related_files = postProcessor.PostProcessor(self.location).list_associated_files(
- self.location)
+ self.location, base_name_only=True)
if self.show.subtitles and sickbeard.SUBTITLES_DIR != '':
related_subs = postProcessor.PostProcessor(self.location).list_associated_files(sickbeard.SUBTITLES_DIR,
@@ -2451,8 +2502,17 @@ class TVEpisode(object):
# move related files
for cur_related_file in related_files:
- cur_result = helpers.rename_ep_file(cur_related_file, absolute_proper_path,
- absolute_current_path_no_ext_length)
+ # related files can live in subfolders, which the original code didn't handle:
+ # work out the subfolder relative to the episode location...
+ cur_related_dir = ek.ek(os.path.dirname, ek.ek(os.path.abspath, cur_related_file))
+ subfolder = cur_related_dir.replace(ek.ek(os.path.dirname, ek.ek(os.path.abspath, self.location)), '')
+ # ...and graft it onto the directory of the proper path
+ proper_related_dir = ek.ek(os.path.dirname, ek.ek(os.path.abspath, absolute_proper_path + file_ext))
+ proper_related_path = absolute_proper_path.replace(proper_related_dir, proper_related_dir + subfolder)
+
+ cur_result = helpers.rename_ep_file(cur_related_file, proper_related_path,
+ absolute_current_path_no_ext_length + len(subfolder))
if not cur_result:
logger.log(str(self.indexerid) + u": Unable to rename file " + cur_related_file, logger.ERROR)
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index 057777b4..d756ee66 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -51,7 +51,7 @@ from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering,
get_xem_numbering_for_show, get_scene_absolute_numbering_for_show, get_xem_absolute_numbering_for_show, \
get_scene_absolute_numbering
-from lib.dateutil import tz
+from lib.dateutil import tz, parser as dateutil_parser
from lib.unrar2 import RarFile
from lib import adba, subliminal
from lib.trakt import TraktAPI
@@ -192,7 +192,7 @@ class BaseHandler(RequestHandler):
def get_current_user(self, *args, **kwargs):
if not isinstance(self, UI) and sickbeard.WEB_USERNAME and sickbeard.WEB_PASSWORD:
- return self.get_secure_cookie('user')
+ return self.get_secure_cookie('sickrage_user')
else:
return True
@@ -269,14 +269,14 @@ class LoginHandler(BaseHandler):
if api_key:
remember_me = int(self.get_argument('remember_me', default=0) or 0)
- self.set_secure_cookie('user', api_key, expires_days=30 if remember_me > 0 else None)
+ self.set_secure_cookie('sickrage_user', api_key, expires_days=30 if remember_me > 0 else None)
self.redirect('/home/')
class LogoutHandler(BaseHandler):
def get(self, *args, **kwargs):
- self.clear_cookie("user")
+ self.clear_cookie("sickrage_user")
self.redirect('/login/')
class KeyHandler(RequestHandler):
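
Renaming the cookie from user to sickrage_user namespaces it, avoiding collisions with other web apps on the same domain that also set a generic 'user' cookie. A self-contained sketch of the Tornado handlers involved (illustrative class names, standard Tornado API):

```python
import tornado.web

COOKIE_NAME = 'sickrage_user'

class ExampleLogin(tornado.web.RequestHandler):
    def post(self):
        # expires_days=None yields a session cookie; 30 persists it across restarts.
        remember_me = int(self.get_argument('remember_me', default=0) or 0)
        self.set_secure_cookie(COOKIE_NAME, 'api-key-here',
                               expires_days=30 if remember_me > 0 else None)

class ExampleLogout(tornado.web.RequestHandler):
    def get(self):
        self.clear_cookie(COOKIE_NAME)
```
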
@@ -585,9 +585,11 @@ class CalendarHandler(BaseHandler):
ical = ical + 'UID:Sick-Beard-' + str(datetime.date.today().isoformat()) + '-' + show[
'show_name'].replace(" ", "-") + '-E' + str(episode['episode']) + 'S' + str(
episode['season']) + '\r\n'
- if (episode['description'] is not None and episode['description'] != ''):
- ical = ical + 'DESCRIPTION:' + show['airs'] + ' on ' + show['network'] + '\\n\\n' + \
- episode['description'].splitlines()[0] + '\r\n'
+ if episode['description']:
+ ical = ical + 'DESCRIPTION:{0} on {1}\\n\\n{2}\r\n'.format(
+ (show['airs'] or '(Unknown airs)'),
+ (show['network'] or 'Unknown network'),
+ episode['description'].splitlines()[0])
else:
ical = ical + 'DESCRIPTION:' + (show['airs'] or '(Unknown airs)') + ' on ' + (
show['network'] or 'Unknown network') + '\r\n'
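
The calendar hunk collapses the duplicated DESCRIPTION branches into one format call with fallbacks for a missing airs or network value; the doubled backslash renders a literal "\n", which is how iCalendar TEXT values encode line breaks. A sketch of the resulting logic:

```python
# Sketch of the DESCRIPTION fallback logic from the hunk above.
def ical_description(airs, network, description):
    airs = airs or '(Unknown airs)'
    network = network or 'Unknown network'
    if description:
        return 'DESCRIPTION:{0} on {1}\\n\\n{2}\r\n'.format(
            airs, network, description.splitlines()[0])
    return 'DESCRIPTION:' + airs + ' on ' + network + '\r\n'
```
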
@@ -2234,7 +2236,7 @@ class HomeAddShows(Home):
logger.log(u"Getting recommended shows from Trakt.tv", logger.DEBUG)
- trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY)
+ trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)
try:
recommendedlist = trakt_api.traktRequest("recommendations/shows?extended=full,images")
@@ -2242,12 +2244,12 @@ class HomeAddShows(Home):
if recommendedlist:
indexers = ['tvdb', 'tvrage']
map(final_results.append, (
- [int(show['show']['ids'][indexers[sickbeard.TRAKT_DEFAULT_INDEXER - 1]]),
- 'http://www.trakt.tv/shows/%s' % show['show']['ids']['slug'], show['show']['title'],
- show['show']['overview'],
- datetime.date.fromtimestamp(int(show['show']['first_aired']) / 1000.0).strftime('%Y%m%d')]
+ [int(show['ids'][indexers[sickbeard.TRAKT_DEFAULT_INDEXER - 1]]),
+ 'http://www.trakt.tv/shows/%s' % show['ids']['slug'], show['title'],
+ show['overview'],
+ None if show['first_aired'] is None else dateutil_parser.parse(show['first_aired']).strftime('%Y%m%d')]
for show in recommendedlist if not helpers.findCertainShow(sickbeard.showList, [
- int(show['show']['ids'][indexers[sickbeard.TRAKT_DEFAULT_INDEXER - 1]])])))
+ int(show['ids'][indexers[sickbeard.TRAKT_DEFAULT_INDEXER - 1]])])))
except (traktException, traktAuthException, traktServerBusy) as e:
logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
@@ -2290,7 +2292,7 @@ class HomeAddShows(Home):
t.trending_shows = []
- trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY)
+ trakt_api = TraktAPI(sickbeard.TRAKT_API_KEY, sickbeard.TRAKT_USERNAME, sickbeard.TRAKT_PASSWORD, sickbeard.TRAKT_DISABLE_SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)
try:
shows = trakt_api.traktRequest("shows/trending?limit=50&extended=full,images") or []
@@ -3478,7 +3480,7 @@ class ConfigGeneral(Config):
def saveGeneral(self, log_dir=None, log_nr = 5, log_size = 1048576, web_port=None, web_log=None, encryption_version=None, web_ipv6=None,
update_shows_on_start=None, trash_remove_show=None, trash_rotate_logs=None, update_frequency=None,
- launch_browser=None, web_username=None,
+ launch_browser=None, showupdate_hour=3, web_username=None,
api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None,
web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
handle_reverse_proxy=None, sort_article=None, auto_update=None, notify_on_update=None,
@@ -3494,6 +3496,7 @@ class ConfigGeneral(Config):
sickbeard.PLAY_VIDEOS = config.checkbox_to_value(play_videos)
sickbeard.DOWNLOAD_URL = download_url
sickbeard.LAUNCH_BROWSER = config.checkbox_to_value(launch_browser)
+ sickbeard.SHOWUPDATE_HOUR = config.to_int(showupdate_hour)
config.change_VERSION_NOTIFY(config.checkbox_to_value(version_notify))
sickbeard.AUTO_UPDATE = config.checkbox_to_value(auto_update)
sickbeard.NOTIFY_ON_UPDATE = config.checkbox_to_value(notify_on_update)
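
showupdate_hour is stored via config.to_int, so a non-numeric submission falls back to an integer default. Since the value is an hour of day, clamping it to 0-23 would be a reasonable extra guard; a sketch of that idea (not part of this patch):

```python
# Coerce a submitted value to a valid hour of day, falling back to a default.
def sanitize_hour(value, default=3):
    try:
        hour = int(value)
    except (TypeError, ValueError):
        return default
    return min(max(hour, 0), 23)

print(sanitize_hour('25'))   # -> 23
print(sanitize_hour(None))   # -> 3
```
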
@@ -3525,7 +3528,7 @@ class ConfigGeneral(Config):
sickbeard.ENCRYPTION_VERSION = config.checkbox_to_value(encryption_version)
sickbeard.WEB_USERNAME = web_username
sickbeard.WEB_PASSWORD = web_password
-
+
sickbeard.DISPLAY_FILESIZE = config.checkbox_to_value(display_filesize)
sickbeard.FUZZY_DATING = config.checkbox_to_value(fuzzy_dating)
sickbeard.TRIM_ZERO = config.checkbox_to_value(trim_zero)
@@ -3654,7 +3657,7 @@ class ConfigSearch(Config):
torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None,
torrent_label=None, torrent_label_anime=None, torrent_path=None, torrent_verify_cert=None,
torrent_seed_time=None, torrent_paused=None, torrent_high_bandwidth=None,
- torrent_rpcurl=None, ignore_words=None, require_words=None):
+ torrent_rpcurl=None, torrent_auth_type=None, ignore_words=None, require_words=None):
results = []
@@ -3715,6 +3718,7 @@ class ConfigSearch(Config):
sickbeard.TORRENT_HIGH_BANDWIDTH = config.checkbox_to_value(torrent_high_bandwidth)
sickbeard.TORRENT_HOST = config.clean_url(torrent_host)
sickbeard.TORRENT_RPCURL = torrent_rpcurl
+ sickbeard.TORRENT_AUTH_TYPE = torrent_auth_type
sickbeard.save_config()
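
TORRENT_AUTH_TYPE is stored verbatim here and consumed by the torrent client wrappers. A hypothetical sketch of how such a setting might select an HTTP auth scheme (the mapping below is illustrative, not SickRage's actual client code):

```python
from requests.auth import HTTPBasicAuth, HTTPDigestAuth

# Hypothetical dispatch on the stored auth type; defaults to basic auth
# when the setting is unset or unrecognized.
def make_auth(auth_type, username, password):
    if auth_type == 'digest':
        return HTTPDigestAuth(username, password)
    return HTTPBasicAuth(username, password)
```
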
@@ -4426,7 +4430,7 @@ class ConfigNotifications(Config):
use_trakt=None, trakt_username=None, trakt_password=None,
trakt_remove_watchlist=None, trakt_use_watchlist=None, trakt_method_add=None,
trakt_start_paused=None, trakt_use_recommended=None, trakt_sync=None,
- trakt_default_indexer=None, trakt_remove_serieslist=None, trakt_disable_ssl_verify=None,
+ trakt_default_indexer=None, trakt_remove_serieslist=None, trakt_disable_ssl_verify=None, trakt_timeout=None,
use_synologynotifier=None, synologynotifier_notify_onsnatch=None,
synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None,
use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None,
@@ -4548,6 +4552,7 @@ class ConfigNotifications(Config):
sickbeard.TRAKT_SYNC = config.checkbox_to_value(trakt_sync)
sickbeard.TRAKT_DEFAULT_INDEXER = int(trakt_default_indexer)
sickbeard.TRAKT_DISABLE_SSL_VERIFY = config.checkbox_to_value(trakt_disable_ssl_verify)
+ sickbeard.TRAKT_TIMEOUT = int(trakt_timeout)
if sickbeard.USE_TRAKT:
sickbeard.traktCheckerScheduler.silent = False
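
TRAKT_TIMEOUT is threaded through to TraktAPI alongside the SSL-verify flag. Assuming a requests-based client, the timeout bounds how long a Trakt call may block, so a stalled connection cannot hang the checker indefinitely; a minimal sketch under that assumption:

```python
import requests

# A requests timeout (in seconds) bounds connect/read time and raises
# requests.Timeout on expiry instead of blocking forever.
def trakt_get(url, timeout=30):
    resp = requests.get(url, timeout=timeout)
    resp.raise_for_status()
    return resp.json()
```
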
diff --git a/sickbeard/webserveInit.py b/sickbeard/webserveInit.py
index 6ddb53a5..5f26c0eb 100644
--- a/sickbeard/webserveInit.py
+++ b/sickbeard/webserveInit.py
@@ -71,7 +71,7 @@ class SRWebServer(threading.Thread):
autoreload=False,
gzip=True,
xheaders=sickbeard.HANDLE_REVERSE_PROXY,
- cookie_secret='61oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=',
+ cookie_secret=sickbeard.WEB_COOKIE_SECRET,
login_url='%s/login/' % self.options['web_root'],
)
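
Replacing the hardcoded cookie_secret with sickbeard.WEB_COOKIE_SECRET means secure cookies are signed with a per-install key, so a cookie forged against one installation is useless against another. One common way to generate such a secret (SickRage's own generator may differ) is:

```python
import base64
import os

# Generate a random per-install cookie secret; persist the result in the
# config file so signed cookies survive restarts.
def generate_cookie_secret():
    return base64.b64encode(os.urandom(32))
```
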