mirror of https://github.com/moparisthebest/SickRage synced 2024-12-12 11:02:21 -05:00

Merge branch 'release/v4.0.0'

Conflicts:
	.travis.yml
	readme.md
echel0n 2014-12-17 02:39:29 -08:00
commit 3147ff2ae5
208 changed files with 6294 additions and 11911 deletions

.gitignore vendored (12 changed lines)
View File

@ -1,4 +1,4 @@
# SB User Related #
# SR User Related #
######################
cache/*
cache.db*
@ -11,11 +11,13 @@ server.crt
server.key
restore/
# SB Test Related #
# SR Test Related #
######################
tests/Logs/*
tests/sickbeard.*
tests/cache.db
tests/cache/*
tests/sickbeard.db*
tests/cache.db*
tests/failed.db
# Compiled source #
######################
@ -46,4 +48,4 @@ Thumbs.db
.directory
*~
/.idea/
*.torrent
*.torrent

View File

@ -17,4 +17,4 @@ script:
- ./tests/all_tests.py
notifications:
irc: "irc.freenode.net#sickrage"
irc: "irc.freenode.net#sickrage"

View File

@ -49,7 +49,7 @@
* Add image to be used when Trakt posters are void on Add Show/Add Trending Show page
* Fix growl registration not sending sickrage an update notification registration
* Add an anonymous redirect builder for external links
* Update xbmc link to Kodi at Config Notifications
* Update kodi link to Kodi at Config Notifications
* Fix missing url for kickasstorrents in config_providers
* Fix post processing when using tvrage indexer and mediabrowser metadata generation
* Change reporting failed network_timezones.txt updates from an error to a warning

View File

@ -33,7 +33,6 @@ if sys.version_info < (2, 6):
try:
import Cheetah
if Cheetah.Version[0] != '2':
raise ValueError
except ValueError:
@ -45,7 +44,7 @@ except:
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
# We only need this for compiling an EXE and I will just always do that on 2.6+
if sys.hexversion >= 0x020600F0:
@ -256,36 +255,22 @@ class SickRage(object):
raise SystemExit(
"Config file root dir '" + os.path.dirname(sickbeard.CONFIG_FILE) + "' must be writeable.")
# Check if we need to perform a restore first
restoreDir = os.path.join(sickbeard.DATA_DIR, 'restore')
if os.path.exists(restoreDir):
if self.restore(restoreDir, sickbeard.DATA_DIR):
logger.log(u"Restore successful...")
else:
logger.log(u"Restore FAILED!", logger.ERROR)
os.chdir(sickbeard.DATA_DIR)
# Check if we need to perform a restore first
restoreDir = os.path.join(sickbeard.DATA_DIR, 'restore')
if self.consoleLogging and os.path.exists(restoreDir):
if self.restore(restoreDir, sickbeard.DATA_DIR):
sys.stdout.write("Restore successful...\n")
else:
sys.stdout.write("Restore FAILED!\n")
# Load the config and publish it to the sickbeard package
if not os.path.isfile(sickbeard.CONFIG_FILE):
logger.log(u"Unable to find '" + sickbeard.CONFIG_FILE + "' , all settings will be default!", logger.ERROR)
if self.consoleLogging and not os.path.isfile(sickbeard.CONFIG_FILE):
sys.stdout.write("Unable to find '" + sickbeard.CONFIG_FILE + "' , all settings will be default!" + "\n")
sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)
CUR_DB_VERSION = db.DBConnection().checkDBVersion()
if CUR_DB_VERSION > 0:
if CUR_DB_VERSION < MIN_DB_VERSION:
raise SystemExit("Your database version (" + str(
CUR_DB_VERSION) + ") is too old to migrate from with this version of SickRage (" + str(
MIN_DB_VERSION) + ").\n" + \
"Upgrade using a previous version of SB first, or start with no database file to begin fresh.")
if CUR_DB_VERSION > MAX_DB_VERSION:
raise SystemExit("Your database version (" + str(
CUR_DB_VERSION) + ") has been incremented past what this version of SickRage supports (" + str(
MAX_DB_VERSION) + ").\n" + \
"If you have used other forks of SB, your database may be unusable due to their modifications.")
# Initialize the config and our threads
sickbeard.initialize(consoleLogging=self.consoleLogging)
@ -343,7 +328,7 @@ class SickRage(object):
logger.ERROR)
if sickbeard.LAUNCH_BROWSER and not self.runAsDaemon:
logger.log(u"Launching browser and exiting", logger.ERROR)
sickbeard.launchBrowser(self.startPort)
sickbeard.launchBrowser('https' if sickbeard.ENABLE_HTTPS else 'http', self.startPort, sickbeard.WEB_ROOT)
os._exit(1)
if self.consoleLogging:
@ -368,7 +353,7 @@ class SickRage(object):
# Launch browser
if sickbeard.LAUNCH_BROWSER and not (self.noLaunch or self.runAsDaemon):
sickbeard.launchBrowser(self.startPort)
sickbeard.launchBrowser('https' if sickbeard.ENABLE_HTTPS else 'http', self.startPort, sickbeard.WEB_ROOT)
# main loop
while (True):
@ -517,7 +502,6 @@ class SickRage(object):
if '--nolaunch' not in popen_list:
popen_list += ['--nolaunch']
logger.log(u"Restarting SickRage with " + str(popen_list))
logger.close()
subprocess.Popen(popen_list, cwd=os.getcwd())
# system exit
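
Note: the hunks above change sickbeard.launchBrowser to take the protocol, port and web root instead of only the port. The helper's body is not part of this diff; the following is only a hedged sketch of what that signature implies, with the URL layout assumed.

```python
# Hedged sketch only: sickbeard.launchBrowser's real implementation is not shown in this diff.
import webbrowser

def launch_browser(protocol='http', start_port=8081, web_root=''):
    # Builds e.g. https://localhost:8081/sickrage/home/ when HTTPS and a web root are configured.
    url = '%s://localhost:%d%s/home/' % (protocol, start_port, web_root)
    try:
        webbrowser.open(url, new=2, autoraise=True)
    except Exception:
        pass  # never let a browser failure abort startup
```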

View File

@ -23,6 +23,12 @@ from __future__ import with_statement
import os.path
import sys
try:
import requests
except ImportError:
print ("You need to install python requests library")
sys.exit(1)
# Try importing Python 2 modules using new names
try:
import ConfigParser as configparser
@ -35,20 +41,6 @@ except ImportError:
import urllib.request as urllib2
from urllib.parse import urlencode
# workaround for broken urllib2 in python 2.6.5: wrong credentials lead to an infinite recursion
if sys.version_info >= (2, 6, 5) and sys.version_info < (2, 6, 6):
class HTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
def retry_http_basic_auth(self, host, req, realm):
# don't retry if auth failed
if req.get_header(self.auth_header, None) is not None:
return None
return urllib2.HTTPBasicAuthHandler.retry_http_basic_auth(self, host, req, realm)
else:
HTTPBasicAuthHandler = urllib2.HTTPBasicAuthHandler
def processEpisode(dir_to_process, org_NZB_name=None, status=None):
# Default values
host = "localhost"
@ -125,20 +117,17 @@ def processEpisode(dir_to_process, org_NZB_name=None, status=None):
else:
protocol = "http://"
url = protocol + host + ":" + port + web_root + "home/postprocess/processEpisode?" + urlencode(params)
url = protocol + host + ":" + port + web_root + "home/postprocess/processEpisode"
login_url = protocol + host + ":" + port + web_root + "login"
print ("Opening URL: " + url)
try:
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, url, username, password)
handler = HTTPBasicAuthHandler(password_mgr)
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)
sess = requests.Session()
sess.post(login_url, data={'username': username, 'password': password}, stream=True, verify=False)
result = sess.get(url, params=params, stream=True, verify=False)
result = opener.open(url).readlines()
for line in result:
for line in result.iter_lines():
if line:
print (line.strip())
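
Note: the rework above drops the urllib2 basic-auth handler in favour of a requests session that logs in once and reuses the session cookie. A minimal standalone sketch of that flow follows; the host, credentials and parameters are illustrative (the real script reads them from its config file).

```python
# Illustrative values only; the actual script loads host, port and credentials from config.
import requests

base_url = "http://localhost:8081/"
login_url = base_url + "login"
process_url = base_url + "home/postprocess/processEpisode"

sess = requests.Session()
# POST the credentials once; the session cookie authenticates the later request.
sess.post(login_url, data={"username": "admin", "password": "secret"}, verify=False)

# Stream the post-processing output and echo it line by line, as the script does.
result = sess.get(process_url, params={"quiet": 1, "dir": "/path/to/download"},
                  stream=True, verify=False)
for line in result.iter_lines():
    if line:
        print(line.strip())
```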

View File

@ -190,18 +190,21 @@ def main():
host = 'localhost'
url = protocol + host + ":" + port + web_root + "/home/postprocess/processEpisode"
login_url = protocol + host + ":" + port + web_root + "/login"
scriptlogger.debug("Opening URL: " + url + ' with params=' + str(params))
print "Opening URL: " + url + ' with params=' + str(params)
try:
response = requests.get(url, auth=(username, password), params=params, verify=False)
sess = requests.Session()
sess.post(login_url, data={'username': username, 'password': password}, stream=True, verify=False)
response = sess.get(url, auth=(username, password), params=params, verify=False, allow_redirects=False)
except Exception, e:
scriptlogger.error(u': Unknown exception raised when opening url: ' + str(e))
time.sleep(3)
sys.exit()
if response.status_code == 401:
if response.status_code == 302:
scriptlogger.error(u'Invalid Sickbeard Username or Password, check your config')
print 'Invalid Sickbeard Username or Password, check your config'
time.sleep(3)

View File

@ -1,13 +1,15 @@
from distutils.core import setup
import py2exe, sys, shutil
from setuptools import setup
sys.argv.append('py2exe')
setup(
options = {'py2exe': {'bundle_files': 1}},
# windows = [{'console': "sabToSickbeard.py"}],
zipfile = None,
console = ['sabToSickbeard.py'],
)
shutil.copy('dist/sabToSickbeard.exe', '.')
setup(name='sickrage',
version='3.3.2',
description='Automated Video File Manager',
url='http://github.com/SiCKRAGETV/SickRage',
author='echel0n',
author_email='sickrage.tv@gmail.com',
license='MIT',
packages=['funniest'],
install_requires=[
'requests',
],
zip_safe=False,
)

View File

@ -1,6 +1,6 @@
### Questions about SickRage?
To get your questions answered, please ask in the [SickRage Forum], on IRC \#sickrage pn freenode.net, or webchat.
To get your questions answered, please ask on the [SickRage Forum](http://sickrage.tv/), or [#sickrage](http://webchat.freenode.net/?channels=sickrage) IRC channel on irc.freenode.net
# Contributing to SickRage
@ -34,7 +34,7 @@ If you are new to SickRage, it is usually a much better idea to ask for help fir
### Try the latest version of SickRage
Bugs in old versions of SickRage may have already been fixed. In order to avoid reporting known issues, make sure you are always testing against the latest build/source. Also, we put new code in the `dev` branch first before pushing down to the `master` branch (which is what the binary builds are built off of).
Bugs in old versions of SickRage may have already been fixed. In order to avoid reporting known issues, make sure you are always testing against the latest build/source. Also, we put new code in the `develop` branch first before pushing down to the `master` branch (which is what the binary builds are built off of).
## Tips For Submitting Code
@ -49,7 +49,7 @@ Bugs in old versions of SickRage may have already been fixed. In order to avoid
Please follow these guidelines before reporting a bug:
1. **Update to the latest version** &mdash; Check if you can reproduce the issue with the latest version from the `dev` branch.
1. **Update to the latest version** &mdash; Check if you can reproduce the issue with the latest version from the `develop` branch.
2. **Use the SickRage Forums search** &mdash; check if the issue has already been reported. If it has been, please comment on the existing issue.
@ -58,7 +58,7 @@ Please follow these guidelines before reporting a bug:
### Feature requests
Please follow the bug guidelines above for feature requests, i.e. update to the latest version and search for existing issues before posting a new request. You can submit Feature Requests in the [SickRage Forum] as well.
Please follow the bug guidelines above for feature requests, i.e. update to the latest version and search for existing issues before posting a new request. You can submit Feature Requests in the [SickRage Forum](http://sickrage.tv/) as well.
### Pull requests
@ -68,7 +68,7 @@ Please follow these guidelines before sending a pull request:
1. Update your fork to the latest upstream version.
2. Use the `dev` branch to base your code off of. Create a topic-branch for your work. We will not merge your 'dev' branch, or your 'master' branch, only topic branches, coming from dev are merged.
2. Use the `develop` branch to base your code off of. Create a topic-branch for your work. We will not merge your 'dev' branch, or your 'master' branch, only topic branches, coming from dev are merged.
3. Follow the coding conventions of the original repository. Do not change line endings of the existing file, as this will rewrite the file and loses history.
@ -106,7 +106,7 @@ Please follow this process; it's the best way to get your work included in the p
- Create a new topic branch to contain your feature, change, or fix:
```bash
git checkout -b <topic-branch-name> dev
git checkout -b <topic-branch-name> develop
```
- Commit your changes in logical chunks. or your pull request is unlikely

View File

@ -452,7 +452,7 @@ inc_top.tmpl
background-position: -357px 0px;
}
.menu-icon-xbmc {
.menu-icon-kodi {
background-position: -378px 0px;
}
@ -486,7 +486,7 @@ inc_top.tmpl
background-position: -294px 0px;
}
.submenu-icon-xbmc {
.submenu-icon-kodi {
background-position: -378px 0px;
}

View File

@ -1108,7 +1108,7 @@ p {
font-weight: 300;
line-height: 1.4;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.lead {
font-size: 21px;
}
@ -1257,7 +1257,7 @@ dt {
dd {
margin-left: 0;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.dl-horizontal dt {
float: left;
width: 160px;
@ -1396,12 +1396,12 @@ pre code {
margin-right: auto;
margin-left: auto;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.container {
width: 750px;
}
}
@media (min-width: 992px) {
@media (min-width: 1011px) {
.container {
width: 970px;
}
@ -1583,7 +1583,7 @@ pre code {
.col-xs-offset-0 {
margin-left: 0;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12 {
float: left;
}
@ -1741,7 +1741,7 @@ pre code {
margin-left: 0;
}
}
@media (min-width: 992px) {
@media (min-width: 1011px) {
.col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12 {
float: left;
}
@ -2636,7 +2636,7 @@ select[multiple].input-lg {
margin-bottom: 10px;
color: #737373;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.form-inline .form-group {
display: inline-block;
margin-bottom: 0;
@ -2699,7 +2699,7 @@ select[multiple].input-lg {
margin-right: -15px;
margin-left: -15px;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.form-horizontal .control-label {
padding-top: 7px;
margin-bottom: 0;
@ -2710,12 +2710,12 @@ select[multiple].input-lg {
top: 0;
right: 15px;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.form-horizontal .form-group-lg .control-label {
padding-top: 14.3px;
}
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.form-horizontal .form-group-sm .control-label {
padding-top: 6px;
}
@ -3239,7 +3239,7 @@ tbody.collapse.in {
bottom: 100%;
margin-bottom: 1px;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-right .dropdown-menu {
right: 0;
left: auto;
@ -3658,7 +3658,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
top: auto;
left: auto;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.nav-tabs.nav-justified > li {
display: table-cell;
width: 1%;
@ -3676,7 +3676,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
.nav-tabs.nav-justified > .active > a:focus {
border: 1px solid #ddd;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.nav-tabs.nav-justified > li > a {
border-bottom: 1px solid #ddd;
border-radius: 4px 4px 0 0;
@ -3723,7 +3723,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
top: auto;
left: auto;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.nav-justified > li {
display: table-cell;
width: 1%;
@ -3744,7 +3744,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
.nav-tabs-justified > .active > a:focus {
border: 1px solid #ddd;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.nav-tabs-justified > li > a {
border-bottom: 1px solid #ddd;
border-radius: 4px 4px 0 0;
@ -3772,12 +3772,12 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
margin-bottom: 20px;
border: 1px solid transparent;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar {
border-radius: 4px;
}
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-header {
float: left;
}
@ -3794,7 +3794,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
.navbar-collapse.in {
overflow-y: auto;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-collapse {
width: auto;
border-top: 0;
@ -3834,7 +3834,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
margin-right: -15px;
margin-left: -15px;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.container > .navbar-header,
.container-fluid > .navbar-header,
.container > .navbar-collapse,
@ -3847,7 +3847,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
z-index: 1000;
border-width: 0 0 1px;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-static-top {
border-radius: 0;
}
@ -3859,7 +3859,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
left: 0;
z-index: 1030;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-fixed-top,
.navbar-fixed-bottom {
border-radius: 0;
@ -3885,7 +3885,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
.navbar-brand:focus {
text-decoration: none;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar > .container .navbar-brand,
.navbar > .container-fluid .navbar-brand {
margin-left: -15px;
@ -3915,7 +3915,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
.navbar-toggle .icon-bar + .icon-bar {
margin-top: 4px;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-toggle {
display: none;
}
@ -3951,7 +3951,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
background-image: none;
}
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-nav {
float: left;
margin: 0;
@ -3967,7 +3967,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
margin-right: -15px;
}
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-left {
float: left !important;
}
@ -3986,7 +3986,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, .1), 0 1px 0 rgba(255, 255, 255, .1);
box-shadow: inset 0 1px 0 rgba(255, 255, 255, .1), 0 1px 0 rgba(255, 255, 255, .1);
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-form .form-group {
display: inline-block;
margin-bottom: 0;
@ -4038,7 +4038,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
margin-bottom: 5px;
}
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-form {
width: auto;
padding-top: 0;
@ -4078,7 +4078,7 @@ select[multiple].input-group-sm > .input-group-btn > .btn {
margin-top: 15px;
margin-bottom: 15px;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.navbar-text {
float: left;
margin-right: 15px;
@ -4565,7 +4565,7 @@ a.list-group-item.active > .badge,
.jumbotron .container {
max-width: 100%;
}
@media screen and (min-width: 768px) {
@media screen and (min-width: 1010px) {
.jumbotron {
padding-top: 48px;
padding-bottom: 48px;
@ -5473,7 +5473,7 @@ button.close {
height: 50px;
overflow: scroll;
}
@media (min-width: 768px) {
@media (min-width: 1010px) {
.modal-dialog {
width: 600px;
margin: 30px auto;
@ -5486,7 +5486,7 @@ button.close {
width: 300px;
}
}
@media (min-width: 992px) {
@media (min-width: 1011px) {
.modal-lg {
width: 900px;
}
@ -5873,7 +5873,7 @@ button.close {
.carousel-caption .btn {
text-shadow: none;
}
@media screen and (min-width: 768px) {
@media screen and (min-width: 1010px) {
.carousel-control .glyphicon-chevron-left,
.carousel-control .glyphicon-chevron-right,
.carousel-control .icon-prev,
@ -6040,7 +6040,7 @@ button.close {
display: inline-block !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
@media (min-width: 1010px) and (max-width: 1012px) {
.visible-sm {
display: block !important;
}
@ -6055,22 +6055,22 @@ button.close {
display: table-cell !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
@media (min-width: 1010px) and (max-width: 1012px) {
.visible-sm-block {
display: block !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
@media (min-width: 1010px) and (max-width: 1012px) {
.visible-sm-inline {
display: inline !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
@media (min-width: 1010px) and (max-width: 1012px) {
.visible-sm-inline-block {
display: inline-block !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
@media (min-width: 1011px) and (max-width: 1199px) {
.visible-md {
display: block !important;
}
@ -6085,17 +6085,17 @@ button.close {
display: table-cell !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
@media (min-width: 1011px) and (max-width: 1199px) {
.visible-md-block {
display: block !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
@media (min-width: 1011px) and (max-width: 1199px) {
.visible-md-inline {
display: inline !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
@media (min-width: 1011px) and (max-width: 1199px) {
.visible-md-inline-block {
display: inline-block !important;
}
@ -6135,12 +6135,12 @@ button.close {
display: none !important;
}
}
@media (min-width: 768px) and (max-width: 991px) {
@media (min-width: 1010px) and (max-width: 1012px) {
.hidden-sm {
display: none !important;
}
}
@media (min-width: 992px) and (max-width: 1199px) {
@media (min-width: 1011px) and (max-width: 1199px) {
.hidden-md {
display: none !important;
}

View File

@ -434,7 +434,7 @@ inc_top.tmpl
background-position: -357px 0px;
}
.menu-icon-xbmc {
.menu-icon-kodi {
background-position: -378px 0px;
}
@ -468,7 +468,7 @@ inc_top.tmpl
background-position: -294px 0px;
}
.submenu-icon-xbmc {
.submenu-icon-kodi {
background-position: -378px 0px;
}

View File

@ -434,7 +434,7 @@ inc_top.tmpl
background-position: -357px 0px;
}
.menu-icon-xbmc {
.menu-icon-kodi {
background-position: -378px 0px;
}
@ -468,7 +468,7 @@ inc_top.tmpl
background-position: -294px 0px;
}
.submenu-icon-xbmc {
.submenu-icon-kodi {
background-position: -378px 0px;
}
@ -3133,4 +3133,74 @@ pnotify.css
.ui-pnotify-closer {
margin-top: -12px;
margin-right: -10px;
}
}
/* =======================================================================
login.css
========================================================================== */
.login {
display: block;
}
.login h1 {
padding: 0 0 10px;
font-size: 60px;
font-family: Lobster;
font-weight: normal;
}
.login form {
padding: 0;
height: 300px;
width: 400px;
position: fixed;
left: 50%;
top: 50%;
margin: -200px 0 0 -200px;
}
@media all and (max-width: 480px) {
.login form {
padding: 0;
height: 300px;
width: 90%;
position: absolute;
left: 5%;
top: 10px;
margin: 0;
}
}
.login .ctrlHolder {
padding: 0;
margin: 0 0 20px;
}
.login .ctrlHolder:hover {
background: none;
}
.login input[type=text],
.login input[type=password] {
width: 100% !important;
font-size: 25px;
padding: 14px !important;
}
.login .remember_me {
font-size: 15px;
float: left;
width: 150px;
padding: 20px 0;
}
.login .remember_me .check {
margin: 5px 5px 0 0;
}
.login .button {
font-size: 25px;
padding: 20px;
float: right;
}

Binary file not shown. (added, 17 KiB)

Binary file not shown. (added, 19 KiB)

Binary file not shown. (added, 1.6 KiB)

Binary file not shown. (added, 1.1 KiB)

Binary file not shown. (added, 1.0 KiB)

Binary file not shown. (removed, 1.9 KiB)

View File

@ -1,25 +1,28 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>API Builder</title>
<script type="text/javascript" charset="utf-8">
<!--
sbRoot = "$sbRoot";
//-->
</script>
<script type="text/javascript" src="$sbRoot/js/lib/jquery-1.8.3.min.js?$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/apibuilder.js?$sbPID"></script>
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<title>SickRage - API Builder</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<meta name="viewport" content="width=device-width">
<meta name="robots" content="noindex">
<style type="text/css">
<!--
#apibuilder select { padding: 2px 2px 2px 6px; display: block; float: left; margin: auto 8px 4px auto;
}
#apibuilder select option { padding: 1px 6px; line-height: 1.2em; }
#apibuilder .disabled { color: #ccc; }
#apibuilder .action { background-color: #efefef; }
-->
</style>
<script type="text/javascript" charset="utf-8">
<!--
sbRoot = "$sbRoot";
//-->
</script>
<script type="text/javascript" src="$sbRoot/js/lib/jquery-1.8.3.min.js?$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/apibuilder.js?$sbPID"></script>
<style type="text/css">
<!--
#apibuilder select { padding: 2px 2px 2px 6px; display: block; float: left; margin: auto 8px 4px auto; }
#apibuilder select option { padding: 1px 6px; line-height: 1.2em; }
#apibuilder .disabled { color: #ccc; }
#apibuilder .action { background-color: #efefef; }
-->
</style>
<script type="text/javascript">
var hide_empty_list=true;
@ -27,22 +30,20 @@ var disable_empty_list=true;
addListGroup("api", "Command");
addOption("Command", "SickBeard", "?cmd=sb", 1); //make default
addList("Command", "SickBeard.AddRootDir", "?cmd=sb.addrootdir", "sb.addrootdir", "", "", "action");
addOption("Command", "SickBeard.CheckScheduler", "?cmd=sb.checkscheduler", "", "", "action");
addList("Command", "SickBeard.DeleteRootDir", "?cmd=sb.deleterootdir", "sb.deleterootdir", "", "", "action");
addOption("Command", "SickBeard.ForceSearch", "?cmd=sb.forcesearch", "", "", "action");
addOption("Command", "SickBeard.GetDefaults", "?cmd=sb.getdefaults", "", "", "action");
addOption("Command", "SickBeard.GetMessages", "?cmd=sb.getmessages", "", "", "action");
addOption("Command", "SickBeard.GetRootDirs", "?cmd=sb.getrootdirs", "", "", "action");
addList("Command", "SickBeard.PauseBacklog", "?cmd=sb.pausebacklog", "sb.pausebacklog", "", "", "action");
addOption("Command", "SickBeard.Ping", "?cmd=sb.ping", "", "", "action");
addOption("Command", "SickBeard.Restart", "?cmd=sb.restart", "", "", "action");
addList("Command", "SickBeard.SearchAllIndexers", "?cmd=sb.searchindexers", "sb.searchindexers", "", "", "action");
addList("Command", "SickBeard.SearchTVDB", "?cmd=sb.searchtvdb&indexer=1", "sb.searchindexers", "", "", "action");
addList("Command", "SickBeard.SearchTVRage", "?cmd=sb.searchtvrage&indexer=2", "sb.searchindexers", "", "", "action");
addList("Command", "SickBeard.SetDefaults", "?cmd=sb.setdefaults", "sb.setdefaults", "", "", "action");
addOption("Command", "SickBeard.Shutdown", "?cmd=sb.shutdown", "", "", "action");
addOption("Command", "SickRage", "?cmd=sb", 1); //make default
addList("Command", "SickRage.AddRootDir", "?cmd=sb.addrootdir", "sb.addrootdir", "", "", "action");
addOption("Command", "SickRage.CheckScheduler", "?cmd=sb.checkscheduler", "", "", "action");
addList("Command", "SickRage.DeleteRootDir", "?cmd=sb.deleterootdir", "sb.deleterootdir", "", "", "action");
addOption("Command", "SickRage.ForceSearch", "?cmd=sb.forcesearch", "", "", "action");
addOption("Command", "SickRage.GetDefaults", "?cmd=sb.getdefaults", "", "", "action");
addOption("Command", "SickRage.GetMessages", "?cmd=sb.getmessages", "", "", "action");
addOption("Command", "SickRage.GetRootDirs", "?cmd=sb.getrootdirs", "", "", "action");
addList("Command", "SickRage.PauseBacklog", "?cmd=sb.pausebacklog", "sb.pausebacklog", "", "", "action");
addOption("Command", "SickRage.Ping", "?cmd=sb.ping", "", "", "action");
addOption("Command", "SickRage.Restart", "?cmd=sb.restart", "", "", "action");
addList("Command", "SickRage.searchindexers", "?cmd=sb.searchindexers", "sb.searchindexers", "", "", "action");
addList("Command", "SickRage.SetDefaults", "?cmd=sb.setdefaults", "sb.setdefaults", "", "", "action");
addOption("Command", "SickRage.Shutdown", "?cmd=sb.shutdown", "", "", "action");
addList("Command", "Coming Episodes", "?cmd=future", "future");
addList("Command", "Episode", "?cmd=episode", "episode");
addList("Command", "Episode.Search", "?cmd=episode.search", "episode.search", "", "", "action");
@ -51,16 +52,12 @@ addList("Command", "Scene Exceptions", "?cmd=exceptions", "exceptions");
addList("Command", "History", "?cmd=history", "history");
addOption("Command", "History.Clear", "?cmd=history.clear", "", "", "action");
addOption("Command", "History.Trim", "?cmd=history.trim", "", "", "action");
addList("Command", "Failed", "?cmd=failed", "failed");
addOption("Command", "Backlog", "?cmd=backlog");
addList("Command", "PostProcess", "?cmd=postprocess", "postprocess", "", "","action");
addList("Command", "Logs", "?cmd=logs", "logs");
addList("Command", "Show", "?cmd=show", "indexerid");
addList("Command", "Show.AddExisting", "?cmd=show.addexisting", "show.addexisting", "", "", "action");
addList("Command", "Show.AddNew", "?cmd=show.addnew", "show.addnew", "", "", "action");
addList("Command", "Show.Cache", "?cmd=show.cache", "indexerid", "", "", "action");
addList("Command", "Show.Delete", "?cmd=show.delete", "indexerid", "", "", "action");
addList("Command", "Show.Delete", "?cmd=show.delete", "show.delete", "", "", "action");
addList("Command", "Show.GetBanner", "?cmd=show.getbanner", "indexerid", "", "", "action");
addList("Command", "Show.GetPoster", "?cmd=show.getposter", "indexerid", "", "", "action");
addList("Command", "Show.GetQuality", "?cmd=show.getquality", "indexerid", "", "", "action");
@ -85,14 +82,6 @@ addOption("logs", "Info", "&min_level=info");
addOption("logs", "Warning", "&min_level=warning");
addOption("logs", "Error", "&min_level=error");
addOption("postprocess", "Optional Param", "", 1);
addOption("postprocess", "C:\\PATH\\TO\\DIR", "&path=C:\\Temp");
addOption("postprocess", "return_data", "&return_data=1");
addOption("postprocess", "force_replace", "&force_replace=1");
addOption("postprocess", "is_priority", "&is_priority=1");
addOption("postprocess", "process_method", "&process_method=copy");
addOption("postprocess", "type", "&type=manual")
addOption("sb.setdefaults", "Optional Param", "", 1);
addList("sb.setdefaults", "Exclude Paused Shows on ComingEps", "&future_show_paused=0", "sb.setdefaults-status");
addList("sb.setdefaults", "Include Paused Shows on ComingEps", "&future_show_paused=1", "sb.setdefaults-status");
@ -104,14 +93,14 @@ addList("sb.setdefaults-status", "Archived", "&status=archived", "sb.setdefaults
addList("sb.setdefaults-status", "Ignored", "&status=ignored", "sb.setdefaults-opt");
addOption("sb.setdefaults-opt", "Optional Param", "", 1);
addList("sb.setdefaults-opt", "No Season Folder", "&season_folder=0", "quality");
addList("sb.setdefaults-opt", "Use Season Folder", "&season_folder=1", "quality");
addList("sb.setdefaults-opt", "Flatten (No Season Folder)", "&flatten_folders=1", "quality");
addList("sb.setdefaults-opt", "Use Season Folder", "&flatten_folders=0", "quality");
addOption("shows", "Optional Param", "", 1);
addOption("shows", "Show Only Paused", "&paused=1");
addOption("shows", "Show Only Not Paused", "&paused=0");
addOption("shows", "Sort by Show Name", "&sort=name");
addOption("shows", "Sort by TVDB ID", "&sort=id");
addOption("shows", "Sort by INDEXER ID", "&sort=id");
addList("show.addexisting", "C:\\temp\\show1", "&location=C:\\temp\\show1", "show.addexisting-indexerid");
addList("show.addexisting", "D:\\Temp\\show2", "&location=D:\\Temp\\show2", "show.addexisting-indexerid");
@ -122,8 +111,8 @@ addList("show.addexisting-indexerid", "101501 (Ancient Aliens)", "&indexerid=101
addList("show.addexisting-indexerid", "80348 (Chuck)", "&indexerid=80348", "show.addexisting-opt");
addOption("show.addexisting-opt", "Optional Param", "", 1);
addList("show.addexisting-opt", "No Season Folder", "&season_folder=0", "quality");
addList("show.addexisting-opt", "Use Season Folder", "&season_folder=1", "quality");
addList("show.addexisting-opt", "Flatten (No Season Folder)", "&flatten_folders=1", "quality");
addList("show.addexisting-opt", "Use Season Folder", "&flatten_folders=0", "quality");
addList("show.addnew", "101501 (Ancient Aliens)", "&indexerid=101501", "show.addnew-loc");
addList("show.addnew", "80348 (Chuck)", "&indexerid=80348", "show.addnew-loc");
@ -141,8 +130,8 @@ addList("show.addnew-status", "Archived", "&status=archived", "show.addnew-opt")
addList("show.addnew-status", "Ignored", "&status=ignored", "show.addnew-opt");
addOption("show.addnew-opt", "Optional Param", "", 1);
addList("show.addnew-opt", "No Season Folder", "&season_folder=0", "quality");
addList("show.addnew-opt", "Use Season Folder", "&season_folder=1", "quality");
addList("show.addnew-opt", "Flatten (No Season Folder)", "&flatten_folders=1", "quality");
addList("show.addnew-opt", "Use Season Folder", "&flatten_folders=0", "quality");
addOptGroup("sb.searchindexers", "Search by Name");
addList("sb.searchindexers", "Lost", "&name=Lost", "sb.searchindexers-lang");
@ -279,15 +268,25 @@ addList("episode.setstatus", "$curShow.name", "&indexerid=$curShow.indexerid", "
// build out each show's season+episode list for episode.setstatus cmd
#for $curShow in $episodeSQLResults:
#set $curSeason = -1
#for $curShowSeason in $episodeSQLResults[$curShow]:
#if $curShowSeason.season != $curSeason and $curShowSeason.season != 0:
// insert just the season as the ep number is now optional
addList("episode.setstatus-$curShow", "Season $curShowSeason.season", "&season=$curShowSeason.season", "episode-status-$curShow");
#end if
#set $curSeason = int($curShowSeason.season)
addList("episode.setstatus-$curShow", "$curShowSeason.season x $curShowSeason.episode", "&season=$curShowSeason.season&episode=$curShowSeason.episode", "episode-status-$curShow");
#end for
addOption("episode-status-$curShow", "Wanted", "&status=wanted");
addOption("episode-status-$curShow", "Skipped", "&status=skipped");
addOption("episode-status-$curShow", "Archived", "&status=archived");
addOption("episode-status-$curShow", "Ignored", "&status=ignored");
addList("episode-status-$curShow", "Wanted", "&status=wanted", "force");
addList("episode-status-$curShow", "Skipped", "&status=skipped", "force");
addList("episode-status-$curShow", "Archived", "&status=archived", "force");
addList("episode-status-$curShow", "Ignored", "&status=ignored", "force");
#end for
addOption("force", "Optional Param", "", 1);
addOption("force", "Replace Downloaded EP", "&force=1");
addOption("force", "Skip Downloaded EP", "&force=0");
addOption("future", "Optional Param", "", 1);
addList("future", "Sort by Date", "&sort=date", "future-type");
addList("future", "Sort by Network", "&sort=network", "future-type");
@ -323,15 +322,6 @@ addOption("history-limit", "Optional Param", "", 1);
addOption("history-limit", "Show Only Downloaded", "&type=downloaded");
addOption("history-limit", "Show Only Snatched", "&type=snatched");
addOption("failed", "Optional Param", "", 1);
//addOptGroup("failed", "Limit Results");
addList("failed", "Limit Results (2)", "&limit=2", "failed-limit");
addList("failed", "Limit Results (25)", "&limit=25", "failed-limit");
addList("failed", "Limit Results (50)", "&limit=50", "failed-limit");
//endOptGroup("failed");
addOption("failed-limit", "Optional Param", "", 1);
addOption("exceptions", "Optional Param", "", 1);
#for $curShow in $sortedShowList:
addOption("exceptions", "$curShow.name", "&indexerid=$curShow.indexerid");
@ -360,6 +350,12 @@ addOption("show.pause-opt", "Optional Param", "", 1);
addOption("show.pause-opt", "Unpause", "&pause=0");
addOption("show.pause-opt", "Pause", "&pause=1");
#for $curShow in $sortedShowList:
addList("show.delete", "$curShow.name", "&indexerid=$curShow.indexerid", "show.delete-opt");
#end for
addOption("show.delete-opt", "Remove Files", "&removefiles=1");
addOption("show.delete-opt", "Don't Remove Files", "&removefiles=0");
</script>
</head>
@ -386,8 +382,8 @@ addOption("show.pause-opt", "Pause", "&pause=1");
<select name="sixthlevel"><option></option></select>
<select name="seventhlevel"><option></option></select>
<div style="float: left; ">
<input class="btn" type="button" value="Reset" onclick="resetListGroup('api',1)" />
<input class="btn" type="button" value="Go" onclick="goListGroup(this.form['apikey'].value, this.form['seventhlevel'].value, this.form['sixthlevel'].value, this.form['fifthlevel'].value, this.form['forthlevel'].value, this.form['thirdlevel'].value, this.form['secondlevel'].value, this.form['firstlevel'].value)" />
<input type="button" value="Reset" onclick="resetListGroup('api',1)" />
<input type="button" value="Go" onclick="goListGroup(this.form['apikey'].value, this.form['seventhlevel'].value, this.form['sixthlevel'].value, this.form['fifthlevel'].value, this.form['forthlevel'].value, this.form['thirdlevel'].value, this.form['secondlevel'].value, this.form['firstlevel'].value)" />
</div>
</td>
</tr>
@ -398,4 +394,4 @@ addOption("show.pause-opt", "Pause", "&pause=1");
</body>
</html>
</html>
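
Note: as the rebuilt command list above shows, the add/edit options now send flatten_folders in place of season_folder, and show.delete gains a removefiles flag. A hedged example of calling those commands directly; the address and API key are placeholders, and the route is assumed to follow the builder's /api/<apikey>/?cmd=... pattern.

```python
# Placeholder address and API key; parameters mirror the options defined in the builder above.
import requests

API = "http://localhost:8081/api/YOUR_API_KEY/"

# Add a show without season folders (flatten_folders=1 replaces the old season_folder=0).
requests.get(API, params={"cmd": "show.addnew", "indexerid": 80348, "flatten_folders": 1})

# Delete a show and also remove its files from disk via the new removefiles flag.
requests.get(API, params={"cmd": "show.delete", "indexerid": 80348, "removefiles": 1})
```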

View File

@ -32,93 +32,93 @@
<div class="component-group">
<div class="component-group-desc">
<img class="notifier-icon" src="$sbRoot/images/notifiers/xbmc.png" alt="" title="XBMC" />
<h3><a href="<%= anon_url('http://kodi.tv/') %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;">XBMC</a></h3>
<img class="notifier-icon" src="$sbRoot/images/notifiers/kodi.png" alt="" title="KODI" />
<h3><a href="<%= anon_url('http://kodi.tv/') %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;">KODI</a></h3>
<p>A free and open source cross-platform media center and home entertainment system software with a 10-foot user interface designed for the living-room TV.</p>
</div>
<fieldset class="component-group-list">
<div class="field-pair">
<label class="cleafix" for="use_xbmc">
<label class="cleafix" for="use_kodi">
<span class="component-title">Enable</span>
<span class="component-desc">
<input type="checkbox" class="enabler" name="use_xbmc" id="use_xbmc" #if $sickbeard.USE_XBMC then "checked=\"checked\"" else ""# />
<p>should SickRage send XBMC commands ?<p>
<input type="checkbox" class="enabler" name="use_kodi" id="use_kodi" #if $sickbeard.USE_KODI then "checked=\"checked\"" else ""# />
<p>should SickRage send KODI commands ?<p>
</span>
</label>
</div>
<div id="content_use_xbmc">
<div id="content_use_kodi">
<div class="field-pair">
<label for="xbmc_always_on">
<label for="kodi_always_on">
<span class="component-title">Always on</span>
<span class="component-desc">
<input type="checkbox" name="xbmc_always_on" id="xbmc_always_on" #if $sickbeard.XBMC_ALWAYS_ON then "checked=\"checked\"" else ""# />
<input type="checkbox" name="kodi_always_on" id="kodi_always_on" #if $sickbeard.KODI_ALWAYS_ON then "checked=\"checked\"" else ""# />
<p>log errors when unreachable ?</p>
</span>
</label>
</div>
<div class="field-pair">
<label for="xbmc_notify_onsnatch">
<label for="kodi_notify_onsnatch">
<span class="component-title">Notify on snatch</span>
<span class="component-desc">
<input type="checkbox" name="xbmc_notify_onsnatch" id="xbmc_notify_onsnatch" #if $sickbeard.XBMC_NOTIFY_ONSNATCH then "checked=\"checked\"" else ""# />
<input type="checkbox" name="kodi_notify_onsnatch" id="kodi_notify_onsnatch" #if $sickbeard.KODI_NOTIFY_ONSNATCH then "checked=\"checked\"" else ""# />
<p>send a notification when a download starts ?</p>
</span>
</label>
</div>
<div class="field-pair">
<label for="xbmc_notify_ondownload">
<label for="kodi_notify_ondownload">
<span class="component-title">Notify on download</span>
<span class="component-desc">
<input type="checkbox" name="xbmc_notify_ondownload" id="xbmc_notify_ondownload" #if $sickbeard.XBMC_NOTIFY_ONDOWNLOAD then "checked=\"checked\"" else ""# />
<input type="checkbox" name="kodi_notify_ondownload" id="kodi_notify_ondownload" #if $sickbeard.KODI_NOTIFY_ONDOWNLOAD then "checked=\"checked\"" else ""# />
<p>send a notification when a download finishes ?</p>
</span>
</label>
</div>
<div class="field-pair">
<label for="xbmc_notify_onsubtitledownload">
<label for="kodi_notify_onsubtitledownload">
<span class="component-title">Notify on subtitle download</span>
<span class="component-desc">
<input type="checkbox" name="xbmc_notify_onsubtitledownload" id="xbmc_notify_onsubtitledownload" #if $sickbeard.XBMC_NOTIFY_ONSUBTITLEDOWNLOAD then "checked=\"checked\"" else ""# />
<input type="checkbox" name="kodi_notify_onsubtitledownload" id="kodi_notify_onsubtitledownload" #if $sickbeard.KODI_NOTIFY_ONSUBTITLEDOWNLOAD then "checked=\"checked\"" else ""# />
<p>send a notification when subtitles are downloaded ?</p>
</span>
</label>
</div>
<div class="field-pair">
<label for="xbmc_update_library">
<label for="kodi_update_library">
<span class="component-title">Update library</span>
<span class="component-desc">
<input type="checkbox" name="xbmc_update_library" id="xbmc_update_library" #if $sickbeard.XBMC_UPDATE_LIBRARY then "checked=\"checked\"" else ""# />
<p>update XBMC library when a download finishes ?</p>
<input type="checkbox" name="kodi_update_library" id="kodi_update_library" #if $sickbeard.KODI_UPDATE_LIBRARY then "checked=\"checked\"" else ""# />
<p>update KODI library when a download finishes ?</p>
</span>
</label>
</div>
<div class="field-pair">
<label for="xbmc_update_full">
<label for="kodi_update_full">
<span class="component-title">Full library update</span>
<span class="component-desc">
<input type="checkbox" name="xbmc_update_full" id="xbmc_update_full" #if $sickbeard.XBMC_UPDATE_FULL then "checked=\"checked\"" else ""# />
<input type="checkbox" name="kodi_update_full" id="kodi_update_full" #if $sickbeard.KODI_UPDATE_FULL then "checked=\"checked\"" else ""# />
<p>perform a full library update if update per-show fails ?</p>
</span>
</label>
</div>
<div class="field-pair">
<label for="xbmc_update_onlyfirst">
<label for="kodi_update_onlyfirst">
<span class="component-title">Only update first host</span>
<span class="component-desc">
<input type="checkbox" name="xbmc_update_onlyfirst" id="xbmc_update_onlyfirst" #if $sickbeard.XBMC_UPDATE_ONLYFIRST then "checked=\"checked\"" else ""# />
<input type="checkbox" name="kodi_update_onlyfirst" id="kodi_update_onlyfirst" #if $sickbeard.KODI_UPDATE_ONLYFIRST then "checked=\"checked\"" else ""# />
<p>only send library updates to the first active host ?</p>
</span>
</label>
</div>
<div class="field-pair">
<label for="xbmc_host">
<span class="component-title">XBMC IP:Port</span>
<input type="text" name="xbmc_host" id="xbmc_host" value="$sickbeard.XBMC_HOST" class="form-control input-sm input350" />
<label for="kodi_host">
<span class="component-title">KODI IP:Port</span>
<input type="text" name="kodi_host" id="kodi_host" value="$sickbeard.KODI_HOST" class="form-control input-sm input350" />
</label>
<label>
<span class="component-title">&nbsp;</span>
<span class="component-desc">host running XBMC (eg. 192.168.1.100:8080)</span>
<span class="component-desc">host running KODI (eg. 192.168.1.100:8080)</span>
</label>
<label>
<span class="component-title">&nbsp;</span>
@ -126,33 +126,33 @@
</label>
</div>
<div class="field-pair">
<label for="xbmc_username">
<span class="component-title">XBMC username</span>
<input type="text" name="xbmc_username" id="xbmc_username" value="$sickbeard.XBMC_USERNAME" class="form-control input-sm input250" />
<label for="kodi_username">
<span class="component-title">KODI username</span>
<input type="text" name="kodi_username" id="kodi_username" value="$sickbeard.KODI_USERNAME" class="form-control input-sm input250" />
</label>
<label>
<span class="component-title">&nbsp;</span>
<span class="component-desc">username for your XBMC server (blank for none)</span>
<span class="component-desc">username for your KODI server (blank for none)</span>
</label>
</div>
<div class="field-pair">
<label for="xbmc_password">
<span class="component-title">XBMC password</span>
<input type="password" name="xbmc_password" id="xbmc_password" value="$sickbeard.XBMC_PASSWORD" class="form-control input-sm input250" />
<label for="kodi_password">
<span class="component-title">KODI password</span>
<input type="password" name="kodi_password" id="kodi_password" value="$sickbeard.KODI_PASSWORD" class="form-control input-sm input250" />
</label>
<label>
<span class="component-title">&nbsp;</span>
<span class="component-desc">password for your XBMC server (blank for none)</span>
<span class="component-desc">password for your KODI server (blank for none)</span>
</label>
</div>
<div class="testNotification" id="testXBMC-result">Click below to test.</div>
<input class="btn" type="button" value="Test XBMC" id="testXBMC" />
<div class="testNotification" id="testKODI-result">Click below to test.</div>
<input class="btn" type="button" value="Test KODI" id="testKODI" />
<input type="submit" class="config_submitter btn" value="Save Changes" />
</div><!-- /content_use_xbmc //-->
</div><!-- /content_use_kodi //-->
</fieldset>
</div><!-- /xbmc component-group //-->
</div><!-- /kodi component-group //-->
<div class="component-group">
@ -160,7 +160,7 @@
<img class="notifier-icon" src="$sbRoot/images/notifiers/plex.png" alt="" title="Plex Media Server" />
<h3><a href="<%= anon_url('http://www.plexapp.com/') %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;">Plex Media Server</a></h3>
<p>Experience your media on a visually stunning, easy to use interface on your Mac connected to your TV. Your media library has never looked this good!</p>
<p class="plexinfo hide">For sending notifications to Plex Home Theater (PHT) clients, use the XBMC notifier with port <b>3005</b>.</p>
<p class="plexinfo hide">For sending notifications to Plex Home Theater (PHT) clients, use the KODI notifier with port <b>3005</b>.</p>
</div>
<fieldset class="component-group-list">
<div class="field-pair">

View File

@ -61,7 +61,7 @@ var show_nzb_providers = #if $sickbeard.USE_NZBS then "true" else "false"#;
#end if
</ul>
<div id="core-component-group1" class="component-group">
<div id="core-component-group1" class="component-group" style='min-height: 550px;'>
<div class="component-group-desc">
<h3>Provider Priorities</h3>
@ -75,9 +75,9 @@ var show_nzb_providers = #if $sickbeard.USE_NZBS then "true" else "false"#;
#end if
<div>
<h4 class="note">*</h4><p class="note">Provider does not support backlog searches at this time.</p>
<h4 class="note">**</h4><p class="note">Provider supports <b>limited</b> backlog searches, all episodes/qualities may not be available.</p>
<h4 class="note">!</h4><p class="note">Provider is <b>NOT WORKING</b>.</p>
<p class="note">* Provider does not support backlog searches at this time.</p>
<p class="note">** Provider supports <b>limited</b> backlog searches, all episodes/qualities may not be available.</p>
<p class="note">! Provider is <b>NOT WORKING</b>.</p>
</div>
</div>

View File

@ -51,6 +51,15 @@
</span>
</label>
</div>
<div class="field-pair">
<label for="randomize_providers">
<span class="component-title">Randomize Providers</span>
<span class="component-desc">
<input type="checkbox" name="randomize_providers" id="randomize_providers" class="enabler" <%= html_checked if sickbeard.RANDOMIZE_PROVIDERS == True else '' %>/>
<p>randomize the provider search order instead of going in order of placement</p>
</span>
</label>
</div>
<div id="content_download_propers">
<div class="field-pair">
<label for="check_propers_interval">

View File

@ -106,6 +106,13 @@
<span class="component-desc">Log downloaded Subtitle on History page?</span>
</label>
</div>
<div class="field-pair">
<input type="checkbox" name="subtitles_multi" id="subtitles_multi" #if $sickbeard.SUBTITLES_MULTI then " checked=\"checked\"" else ""#/>
<label class="clearfix" for="subtitles_multi">
<span class="component-title">Subtitles Multi-Language</span>
<span class="component-desc">Append language codes to subtitle filenames?</span>
</label>
</div>
<br/><input type="submit" class="btn config_submitter" value="Save Changes" /><br/>
</div>
</fieldset>

View File

@ -244,7 +244,7 @@
</tr>
#end if
<tr><td class="showLegend">Size:</td><td>$sickbeard.helpers.human(sickbeard.helpers.get_size($showLoc[0]))</td></tr>
<tr><td class="showLegend">Size:</td><td>$sickbeard.helpers.pretty_filesize(sickbeard.helpers.get_size($showLoc[0]))</td></tr>
</table>

View File

@ -128,7 +128,7 @@
<tr>
#set $curdatetime = $datetime.datetime.strptime(str($hItem["date"]), $history.dateFormat)
<td align="center"><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)</div><span class="sort_data">$time.mktime($curdatetime.timetuple())</span></td>
<td class="tvShow" width="35%"><a href="$sbRoot/home/displayShow?show=$hItem["showid"]#season-$hItem["season"]">$hItem["show_name"] - <%="S%02i" % int(hItem["season"])+"E%02i" % int(hItem["episode"]) %>#if "proper" in $hItem["resource"].lower or "repack" in $hItem["resource"].lower then ' <span class="quality Proper">Proper</span>' else ""#</a></td>
<td class="tvShow" width="35%"><a href="$sbRoot/home/displayShow?show=$hItem["showid"]#season-$hItem["season"]">$hItem["show_name"] - <%="S%02i" % int(hItem["season"])+"E%02i" % int(hItem["episode"]) %>#if "proper" in $hItem["resource"].lower() or "repack" in $hItem["resource"].lower() then ' <span class="quality Proper">Proper</span>' else ""#</a></td>
<td align="center" #if $curStatus == SUBTITLED then 'class="subtitles_column"' else ''#>
#if $curStatus == SUBTITLED:
<img width="16" height="11" style="vertical-align:middle;" src="$sbRoot/images/flags/<%= hItem["resource"][len(hItem["resource"])-6:len(hItem["resource"])-4]+'.png'%>">
@ -190,7 +190,7 @@
#set $curdatetime = $datetime.datetime.strptime(str($hItem["actions"][0]["time"]), $history.dateFormat)
<td align="center"><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdatetime($curdatetime, show_seconds=True)</div><span class="sort_data">$time.mktime($curdatetime.timetuple())</span></td>
<td class="tvShow" width="25%">
<span><a href="$sbRoot/home/displayShow?show=$hItem["show_id"]#season-$hItem["season"]">$hItem["show_name"] - <%="S%02i" % int(hItem["season"])+"E%02i" % int(hItem["episode"]) %>#if "proper" in $hItem["resource"].lower or "repack" in $hItem["resource"].lower then ' <span class="quality Proper">Proper</span>' else ""#</a></span>
<span><a href="$sbRoot/home/displayShow?show=$hItem["show_id"]#season-$hItem["season"]">$hItem["show_name"] - <%="S%02i" % int(hItem["season"])+"E%02i" % int(hItem["episode"]) %>#if "proper" in $hItem["resource"].lower() or "repack" in $hItem["resource"].lower() then ' <span class="quality Proper">Proper</span>' else ""#</a></span>
</td>
<td align="center" provider="<%=str(sorted(hItem["actions"])[0]["provider"])%>">
#for $action in sorted($hItem["actions"]):

View File

@ -214,6 +214,10 @@
<option value="$sbRoot/setHomeLayout/?layout=banner" #if $sickbeard.HOME_LAYOUT == "banner" then "selected=\"selected\"" else ""#>Banner</option>
<option value="$sbRoot/setHomeLayout/?layout=simple" #if $sickbeard.HOME_LAYOUT == "simple" then "selected=\"selected\"" else ""#>Simple</option>
</select>
#if $layout != 'poster':
Search:
<input class="search form-control form-control-inline input-sm input200" type="search" data-column="1" placeholder="Search Show Name"> <button type="button" class="resetshows resetanime btn btn-inline">Reset Search</button>
#end if
</span>
#if $layout == 'poster':
@ -238,12 +242,6 @@
#end if
</div>
#if $layout != 'poster':
<div class="pull-right">
<input class="search form-control form-control-inline input-sm input200" type="search" data-column="1" placeholder="Search Show Name"> <button type="button" class="resetshows resetanime btn btn-inline">Reset Search</button>
</div>
#end if
#for $curShowlist in $showlists:
#set $curListType = $curShowlist[0]
#set $myShowList = $list($curShowlist[1])
@ -603,7 +601,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
</td>
<td align="center">
<img src="$sbRoot/images/#if int($curShow.paused) == 0 and $curShow.status != "Ended" then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" />
<img src="$sbRoot/images/#if int($curShow.paused) == 0 and $curShow.status == "Continuing" then "yes16.png\" alt=\"Y\"" else "no16.png\" alt=\"N\""# width="16" height="16" />
</td>
<td align="center">

View File

@ -44,11 +44,11 @@
<input type="hidden" id="indexer_timeout" value="$sickbeard.INDEXER_TIMEOUT" />
#if $use_provided_info:
Show retrieved from existing metadata: <a href="<%= anon_url(sickbeard.indexerApi(provided_indexer).config['show_url'], provided_indexer_id) %>">$provided_indexer_name</a>
<input type="hidden" name="indexerLang" value="en" />
<input type="hidden" name="whichSeries" value="$provided_indexer_id" />
Show retrieved from existing metadata: <a href="$anon_url($sickbeard.indexerApi($provided_indexer).config['show_url'], $provided_indexer_id)">$provided_indexer_name</a>
<input type="hidden" id="indexerLang" name="indexerLang" value="en" />
<input type="hidden" id="whichSeries" name="whichSeries" value="$provided_indexer_id" />
<input type="hidden" id="providedIndexer" name="providedIndexer" value="$provided_indexer" />
<input type="hidden" id="providedName" value="$provided_indexer_name" />
<input type="hidden" id="providedIndexer" value="$provided_indexer" />
#else:
<input type="text" id="nameToSearch" value="$default_show_name" class="form-control form-control-inline input-sm input350" />

View File

@ -14,6 +14,7 @@
#import os.path
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')
<script type="text/javascript" src="$sbRoot/js/addTrendingShow.js?$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/rootDirs.js?$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?$sbPID"></script>
@ -90,7 +91,6 @@
<h1 class="title">$title</h1>
#end if
#if $trending_shows
<div id="tabs">
<ul>
<li><a href="#tabs-1">Manage Directories</a></li>
@ -119,46 +119,10 @@
<option value="desc">Desc</option>
</select>
</div>
#end if
<div id="container">
#if None is $trending_shows
<div class="trakt_show" style="width:100%; margin-top:20px">
<p class="red-text">Trakt API did not return results, this can happen from time to time.
<br /><br />This view should auto refresh every 10 mins.</p>
</div>
#else
#for $cur_show in $trending_shows:
#if not 'ExistsInLibrary' in $cur_show['tvdb_id']:
#set $image = re.sub(r'(?im)(.*)(\..*?)$', r'\1-300\2', $cur_show['images']['poster'], 0)
<div class="trakt_show" data-name="$cur_show['title']" data-rating="$cur_show['ratings']['percentage']" data-votes="$cur_show['ratings']['votes']">
<div class="traktContainer">
<div class="trakt-image">
<a class="trakt-image" href="<%= anon_url(cur_show['url']) %>" target="_blank"><img alt="" class="trakt-image" src="${image}" /></a>
</div>
<div class="show-title">
<%= (cur_show['title'], '<span>&nbsp;</span>')[ '' == cur_show['title']] %>
</div>
<div class="clearfix">
<p>$cur_show['ratings']['percentage']% <img src="$sbRoot/images/heart.png"></p>
<i>$cur_show['ratings']['votes'] votes</i>
<div class="traktShowTitleIcons">
<a href="$sbRoot/home/addTraktShow?indexer_id=${cur_show['tvdb_id']}&amp;showName=${cur_show['title']}" class="btn btn-xs">Add Show</a>
</div>
</div>
</div>
</div>
#end if
#end for
#end if
</div>
<br />
<div id="trendingShows"></div>
<br />
<script type="text/javascript" charset="utf-8">
<!--

View File

@ -27,7 +27,7 @@
#set $sql_result = $myDB.select($sql_statement)
#set $shows_total = len($sickbeard.showList)
#set $shows_active = len([show for show in $sickbeard.showList if show.paused == 0 and show.status != "Ended"])
#set $shows_active = len([show for show in $sickbeard.showList if show.paused == 0 and show.status == "Continuing"])
#if $sql_result:
#set $ep_snatched = $sql_result[0]['ep_snatched']

View File

@ -109,8 +109,8 @@
\$("#SubMenu a:contains('Manage Torrents')").addClass('btn').html('<span class="submenu-icon-bittorrent pull-left"></span> Manage Torrents');
\$("#SubMenu a[href$='/manage/failedDownloads/']").addClass('btn').html('<span class="submenu-icon-failed-download pull-left"></span> Failed Downloads');
\$("#SubMenu a:contains('Notification')").addClass('btn').html('<span class="ui-icon ui-icon-note pull-left"></span> Notifications');
\$("#SubMenu a:contains('Update show in XBMC')").addClass('btn').html('<span class="submenu-icon-xbmc pull-left"></span> Update show in XBMC');
\$("#SubMenu a[href$='/home/updateXBMC/']").addClass('btn').html('<span class="submenu-icon-xbmc pull-left"></span> Update XBMC');
\$("#SubMenu a:contains('Update show in KODI')").addClass('btn').html('<span class="submenu-icon-kodi pull-left"></span> Update show in KODI');
\$("#SubMenu a[href$='/home/updateKODI/']").addClass('btn').html('<span class="submenu-icon-kodi pull-left"></span> Update KODI');
}
\$(document).ready(function() {
@ -140,6 +140,7 @@
<a class="navbar-brand" href="$sbRoot/home/" title="SickRage"><img alt="SickRage" src="$sbRoot/images/sickrage.png" style="height: 50px;" class="img-responsive pull-left" /></a>
</div>
#if $sbLogin:
<div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
<ul class="nav navbar-nav navbar-right">
<li id="NAVhome" class="dropdown">
@ -169,8 +170,8 @@
#if $sickbeard.USE_PLEX and $sickbeard.PLEX_SERVER_HOST != "":
<li><a href="$sbRoot/home/updatePLEX/"><i class="menu-icon-backlog-view"></i>&nbsp;Update PLEX</a></li>
#end if
#if $sickbeard.USE_XBMC and $sickbeard.XBMC_HOST != "":
<li><a href="$sbRoot/home/updateXBMC/"><i class="menu-icon-xbmc"></i>&nbsp;Update XBMC</a></li>
#if $sickbeard.USE_KODI and $sickbeard.KODI_HOST != "":
<li><a href="$sbRoot/home/updateKODI/"><i class="menu-icon-kodi"></i>&nbsp;Update KODI</a></li>
#end if
#if $sickbeard.USE_TORRENTS and $sickbeard.TORRENT_METHOD != 'blackhole' \
and ($sickbeard.ENABLE_HTTPS and $sickbeard.TORRENT_HOST[:5] == 'https' \
@ -212,14 +213,15 @@
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown"><img src="$sbRoot/images/menu/system18-2.png" class="navbaricon hidden-xs" /><b class="caret hidden-xs"></b><span class="visible-xs">System <b class="caret"></b></span></a>
<ul class="dropdown-menu">
<li><a href="$sbRoot/manage/manageSearches/forceVersionCheck"><i class="menu-icon-update"></i>&nbsp;Force Version Check</a></li>
<li><a href="$sbRoot/home/updateCheck?pid=$sbPID"><i class="menu-icon-update"></i>&nbsp;Check For Updates</a></li>
<li><a href="$sbRoot/home/restart/?pid=$sbPID" class="confirm restart"><i class="menu-icon-restart"></i>&nbsp;Restart</a></li>
<li><a href="$sbRoot/home/shutdown/?pid=$sbPID" class="confirm shutdown"><i class="menu-icon-shutdown"></i>&nbsp;Shutdown</a></li>
<li><a href="$sbRoot/logout" class="confirm logout"><i class="menu-icon-shutdown"></i>&nbsp;Logout</a></li>
</ul>
</li>
<li id="donate"><a href="http://sr-upgrade.appspot.com" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href); return false;"><img src="$sbRoot/images/donate.jpg" alt="[donate]" class="navbaricon hidden-xs" /></a></li>
</ul>
#end if
</div><!-- /.navbar-collapse -->
</div><!-- /.container-fluid -->
</nav>

View File

@ -0,0 +1,21 @@
#import sickbeard
#set global $title="Login"
#set global $sbPath = ".."
#set global $topmenu="login"#
#import os.path
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl")
<div class="login">
<form action="" method="post">
<h1>SickRage</h1>
<div class="ctrlHolder"><input class="inlay" name="username" type="text" placeholder="Username" autocomplete="off" /></div>
<div class="ctrlHolder"><input class="inlay" name="password" type="password" placeholder="Password" autocomplete="off" /></div>
<div class="ctrlHolder">
<label class="remember_me" title="for 30 days"><input class="inlay" id="remember_me" name="remember_me" type="checkbox" value="1" checked="checked" /> Remember me</label>
<input class="button" name="submit" type="submit" value="Login" />
</div>
</form>
</div>

View File

@ -47,10 +47,6 @@ In Progress<br />
#end if
<br />
<h3>Version Check:</h3>
<a class="btn" href="$sbRoot/manage/manageSearches/forceVersionCheck"><i class="icon-check"></i> Force Check</a>
<br /> <br />
<h3>Search Queue:</h3>
Backlog: <i>$queueLength['backlog'] pending items</i></br>
Daily: <i>$queueLength['daily'] pending items</i></br>

View File

@ -0,0 +1,102 @@
#import sickbeard
#import datetime
#import re
#import os.path
#from sickbeard.common import *
#from sickbeard import sbdatetime
#from sickbeard.helpers import anon_url
<script type="text/javascript" charset="utf-8">
<!--
\$(document).ready(function(){
// initialise combos for dirty page refreshes
\$('#showsort').val('original');
\$('#showsortdirection').val('asc');
var \$container = [\$('#container')];
jQuery.each(\$container, function (j) {
this.isotope({
itemSelector: '.trakt_show',
sortBy: 'original-order',
layoutMode: 'fitRows',
getSortData: {
name: function( itemElem ) {
var name = \$( itemElem ).attr('data-name') || '';
#if not $sickbeard.SORT_ARTICLE:
name = name.replace(/^(The|A|An)\s/i, '');
#end if
return name.toLowerCase();
},
rating: '[data-rating] parseInt',
votes: '[data-votes] parseInt',
}
});
});
\$('#showsort').on( 'change', function() {
var sortCriteria;
switch (this.value) {
case 'original':
sortCriteria = 'original-order'
break;
case 'rating':
/* randomise first, otherwise a previous rating_votes
* sort can leave this sort with nothing to do.
*/
\$('#container').isotope({sortBy: 'random'});
sortCriteria = 'rating';
break;
case 'rating_votes':
sortCriteria = ['rating', 'votes'];
break;
case 'votes':
sortCriteria = 'votes';
break;
default:
sortCriteria = 'name'
break;
}
\$('#container').isotope({sortBy: sortCriteria});
});
\$('#showsortdirection').on( 'change', function() {
\$('#container').isotope({sortAscending: ('asc' == this.value)});
});
});
//-->
</script>
<div id="container">
#if not $trending_shows
<div class="trakt_show" style="width:100%; margin-top:20px">
<p class="red-text">Trakt API did not return any results, please check your config.
</div>
#else
#for $cur_show in $trending_shows:
#set $image = re.sub(r'(?im)(.*)(\..*?)$', r'\1-300\2', $cur_show['images']['poster'], 0)
<div class="trakt_show" data-name="$cur_show['title']" data-rating="$cur_show['ratings']['percentage']" data-votes="$cur_show['ratings']['votes']">
<div class="traktContainer">
<div class="trakt-image">
<a class="trakt-image" href="<%= anon_url(cur_show['url']) %>" target="_blank"><img alt="" class="trakt-image" src="${image}" /></a>
</div>
<div class="show-title">
<%= (cur_show['title'], '<span>&nbsp;</span>')[ '' == cur_show['title']] %>
</div>
<div class="clearfix">
<p>$cur_show['ratings']['percentage']% <img src="$sbRoot/images/heart.png"></p>
<i>$cur_show['ratings']['votes'] votes</i>
<div class="traktShowTitleIcons">
<a href="$sbRoot/home/addShows/addTraktShow?indexer_id=${cur_show['tvdb_id'] or cur_show['tvrage_id']}&amp;showName=${cur_show['title']}" class="btn btn-xs">Add Show</a>
</div>
</div>
</div>
</div>
#end for
#end if
</div>
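The template above routes every external Trakt link through anon_url(). Its implementation is not part of this diff, so the following is only a minimal sketch of the prefixing pattern visible elsewhere in the templates (the ${sickbeard.ANON_REDIRECT} + href usage in the navbar); the function body and the placeholder redirect URL are assumptions, not the real sickbeard.helpers.anon_url.

# Hedged sketch only: approximates the behaviour implied by the templates above.
def anon_url(url, anon_redirect='http://example-anonymizer/?'):
    """Prefix an external URL with the anonymizer, or pass it through unchanged.

    'anon_redirect' stands in for the sickbeard.ANON_REDIRECT setting (assumed);
    an empty value means anonymous redirects are disabled.
    """
    return (anon_redirect + url) if anon_redirect else url

print(anon_url('http://trakt.tv/show/some-show'))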

View File

@ -1,49 +1,50 @@
$(document).ready(function() {
$(document).ready(function() {
$('#tableDiv').on('click', '#checkAll', function() {
$('#checkAll').live('click', function(){
var seasCheck = this;
$('.dirCheck').each(function(){
this.checked = seasCheck.checked;
$('.dirCheck').each(function() {
this.checked = seasCheck.checked;
});
});
$('#submitShowDirs').click(function(){
$('#submitShowDirs').click(function() {
var dirArr = new Array();
$('.dirCheck').each(function(i,w) {
if (this.checked == true) {
var show = $(this).attr('id');
var indexer = $(this).closest('tr').find('select').val();
dirArr.push(encodeURIComponent(indexer + '|' + show));
}
});
if (this.checked == true) {
var show = $(this).attr('id');
var indexer = $(this).closest('tr').find('select').val();
dirArr.push(encodeURIComponent(indexer + '|' + show));
}
});
if (dirArr.length == 0)
if (dirArr.length == 0) {
return false;
}
url = sbRoot+'/home/addShows/addExistingShows?promptForSettings='+ ($('#promptForSettings').prop('checked') ? 'on' : 'off');
url += '&shows_to_add='+dirArr.join('&shows_to_add=');
url = sbRoot + '/home/addShows/addExistingShows?promptForSettings=' + ($('#promptForSettings').prop('checked') ? 'on' : 'off');
url += '&shows_to_add=' + dirArr.join('&shows_to_add=');
window.location.href = url;
});
function loadContent() {
var url = '';
$('.dir_check').each(function(i,w){
$('.dir_check').each(function(i,w) {
if ($(w).is(':checked')) {
if (url.length)
if (url.length) {
url += '&';
}
url += 'rootDir=' + encodeURIComponent($(w).attr('id'));
}
});
$('#tableDiv').html('<img id="searchingAnim" src="' + sbRoot + '/images/loading32' + themeSpinner + '.gif" height="32" width="32" /> loading folders...');
$.get(sbRoot+'/home/addShows/massAddTable', url, function(data) {
$('#tableDiv').html('<img id="searchingAnim" src="' + sbRoot + '/images/loading32.gif" height="32" width="32" /> loading folders...');
$.get(sbRoot + '/home/addShows/massAddTable/', url, function(data) {
$('#tableDiv').html(data);
$("#addRootDirTable").tablesorter({
//sortList: [[1,0]],
@ -58,21 +59,24 @@ $(document).ready(function() {
var last_txt = '';
$('#rootDirText').change(function() {
if (last_txt == $('#rootDirText').val())
if (last_txt == $('#rootDirText').val()) {
return false;
else
} else {
last_txt = $('#rootDirText').val();
$('#rootDirStaticList').html('');
}
$('#rootDirStaticList').html('');
$('#rootDirs option').each(function(i, w) {
$('#rootDirStaticList').append('<li class="ui-state-default ui-corner-all"><input type="checkbox" class="dir_check" id="'+$(w).val()+'" checked=checked> <label for="'+$(w).val()+'" style="color:#09A2FF;"><b>'+$(w).val()+'</b></label></li>')
$('#rootDirStaticList').append('<li class="ui-state-default ui-corner-all"><input type="checkbox" class="cb dir_check" id="' + $(w).val() + '" checked=checked> <label for="' + $(w).val() + '"><b>' + $(w).val() + '</b></label></li>');
});
loadContent();
});
$('.dir_check').live('click', loadContent);
$('.showManage').live('click', function() {
$( "#tabs" ).tabs( 'select', 0 );
$('#rootDirStaticList').on('click', '.dir_check', loadContent);
$('#tableDiv').on('click', '.showManage', function(event) {
event.preventDefault();
$("#tabs").tabs('option', 'active', 0);
$('html,body').animate({scrollTop:0}, 1000);
});
});

View File

@ -0,0 +1,21 @@
$(document).ready(function() {
var trendingRequestXhr = null;
function loadContent() {
if (trendingRequestXhr) trendingRequestXhr.abort();
$('#trendingShows').html('<img id="searchingAnim" src="' + sbRoot + '/images/loading32' + themeSpinner + '.gif" height="32" width="32" /> loading trending shows...');
trendingRequestXhr = $.ajax({
url: sbRoot + '/home/addShows/getTrendingShows/',
timeout: 60 * 1000,
error: function () {
$('#trendingShows').empty().html('Trakt timed out, refresh page to try again');
},
success: function (data) {
$('#trendingShows').html(data);
}
});
}
loadContent();
});

View File

@ -1,4 +1,4 @@
var search_status_url = sbRoot + '/getManualSearchStatus';
var search_status_url = sbRoot + '/home/getManualSearchStatus';
PNotify.prototype.options.maxonscreen = 5;
$.fn.manualSearches = [];

View File

@ -11,16 +11,16 @@ var _disable_empty_list=false;
var _hide_empty_list=false;
function goListGroup(apikey, L7, L6, L5, L4, L3, L2, L1){
var GlobalOptions = "";
var html, GlobalOptions = "";
$('.global').each(function(){
var checked = $(this).prop('checked');
if(checked) {
var globalID = $(this).attr('id');
// handle jsonp/callback global option differently
if(globalID == "jsonp") {
GlobalOptions = GlobalOptions + "&" + globalID + "=foo";
GlobalOptions = GlobalOptions + "&" + globalID + "=foo";
} else {
GlobalOptions = GlobalOptions + "&" + globalID + "=1";
GlobalOptions = GlobalOptions + "&" + globalID + "=1";
}
}
});
@ -28,7 +28,7 @@ function goListGroup(apikey, L7, L6, L5, L4, L3, L2, L1){
// handle the show.getposter / show.getbanner differently as they return an image and not json
if (L1 == "?cmd=show.getposter" || L1 == "?cmd=show.getbanner") {
var imgcache = sbRoot + "/api/" + apikey + "/" + L1 + L2 + GlobalOptions;
var html = imgcache + '<br/><br/><img src="' + sbRoot + '/images/loading16.gif" id="imgcache">';
html = imgcache + '<br/><br/><img src="' + sbRoot + '/images/loading16.gif" id="imgcache">';
$('#apiResponse').html(html);
$.ajax({
url: sbRoot + "/api/" + apikey + "/" + L1 + L2 + GlobalOptions,
@ -38,14 +38,14 @@ function goListGroup(apikey, L7, L6, L5, L4, L3, L2, L1){
success: function (img) {
$('#imgcache').attr('src', imgcache);
}
})
});
}
else {
var html = sbRoot + "/api/" + apikey + "/" + L1 + L2 + L3 + L4 + L5 + L6 + L7 + GlobalOptions + "<br/><pre>";
html = sbRoot + "/api/" + apikey + "/" + L1 + L2 + L3 + L4 + L5 + L6 + L7 + GlobalOptions + "<br/><pre>";
html += $.ajax({
url: sbRoot + "/api/" + apikey + "/" + L1 + L2 + L3 + L4 + L5 + L6 + L7 + GlobalOptions,
async: false,
dataType: "html",
dataType: "html"
}).responseText;
html += '</pre>';
@ -189,13 +189,13 @@ function cs_getCookie(name) {
function cs_optionOBJ(type,text,value,label,css) { this.type=type; this.text=text; this.value=value; this.label=label; this.css=css; }
function cs_getOptions(menu,list) {
var opt=new Array();
var opt=[];
for (var i=0; i<menu.items.length; i++) {
opt[i]=new cs_optionOBJ(menu.items[i].type, menu.items[i].dis, menu.items[i].link, menu.items[i].label, menu.items[i].css);
}
if (opt.length==0 && menu.name!="") {
cs_getSubList(menu.name,list);
opt[0]=new cs_optionOBJ(cs_L, "loading ...", "", "", "");
//opt[0]=new cs_optionOBJ(cs_L, "loading ...", "", "", "");
}
return opt;
}
@ -638,4 +638,4 @@ function selectOptions(n,opts,mode) {
}
}
}
// ------
// ------

View File

@ -58,7 +58,7 @@ $(document).ready(function(){
$('#api_key').click(function(){ $('#api_key').select() });
$("#generate_new_apikey").click(function(){
$.get(sbRoot + '/config/general/generateKey',
$.get(sbRoot + '/config/general/generateApiKey',
function(data){
if (data.error != undefined) {
alert(data.error);

View File

@ -5,7 +5,7 @@ $(document).ready(function(){
$("#Backup").attr("disabled", true);
$('#Backup-result').html(loading);
var backupDir = $("#backupDir").val();
$.get(sbRoot + "/config/backup", {'backupDir': backupDir})
$.get(sbRoot + "/config/backuprestore/backup", {'backupDir': backupDir})
.done(function (data) {
$('#Backup-result').html(data);
$("#Backup").attr("disabled", false);
@ -15,7 +15,7 @@ $(document).ready(function(){
$("#Restore").attr("disabled", true);
$('#Restore-result').html(loading);
var backupFile = $("#backupFile").val();
$.get(sbRoot + "/config/restore", {'backupFile': backupFile})
$.get(sbRoot + "/config/backuprestore/restore", {'backupFile': backupFile})
.done(function (data) {
$('#Restore-result').html(data);
$("#Restore").attr("disabled", false);

View File

@ -37,22 +37,22 @@ $(document).ready(function(){
});
});
$('#testXBMC').click(function () {
var xbmc_host = $.trim($('#xbmc_host').val());
var xbmc_username = $.trim($('#xbmc_username').val());
var xbmc_password = $.trim($('#xbmc_password').val());
if (!xbmc_host) {
$('#testXBMC-result').html('Please fill out the necessary fields above.');
$('#xbmc_host').addClass('warning');
$('#testKODI').click(function () {
var kodi_host = $.trim($('#kodi_host').val());
var kodi_username = $.trim($('#kodi_username').val());
var kodi_password = $.trim($('#kodi_password').val());
if (!kodi_host) {
$('#testKODI-result').html('Please fill out the necessary fields above.');
$('#kodi_host').addClass('warning');
return;
}
$('#xbmc_host').removeClass('warning');
$('#kodi_host').removeClass('warning');
$(this).prop('disabled', true);
$('#testXBMC-result').html(loading);
$.get(sbRoot + '/home/testXBMC', {'host': xbmc_host, 'username': xbmc_username, 'password': xbmc_password})
$('#testKODI-result').html(loading);
$.get(sbRoot + '/home/testKODI', {'host': kodi_host, 'username': kodi_username, 'password': kodi_password})
.done(function (data) {
$('#testXBMC-result').html(data);
$('#testXBMC').prop('disabled', false);
$('#testKODI-result').html(data);
$('#testKODI').prop('disabled', false);
});
});

View File

@ -0,0 +1,3 @@
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)

View File

@ -0,0 +1,23 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Execute computations asynchronously using threads or processes."""
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
from concurrent.futures._base import (FIRST_COMPLETED,
FIRST_EXCEPTION,
ALL_COMPLETED,
CancelledError,
TimeoutError,
Future,
Executor,
wait,
as_completed)
from concurrent.futures.thread import ThreadPoolExecutor
# Jython doesn't have multiprocessing
try:
from concurrent.futures.process import ProcessPoolExecutor
except ImportError:
pass

View File

@ -0,0 +1,605 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
from __future__ import with_statement
import logging
import threading
import time
from concurrent.futures._compat import reraise
try:
from collections import namedtuple
except ImportError:
from concurrent.futures._compat import namedtuple
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
FIRST_COMPLETED = 'FIRST_COMPLETED'
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
ALL_COMPLETED = 'ALL_COMPLETED'
_AS_COMPLETED = '_AS_COMPLETED'
# Possible future states (for internal use by the futures package).
PENDING = 'PENDING'
RUNNING = 'RUNNING'
# The future was cancelled by the user...
CANCELLED = 'CANCELLED'
# ...and _Waiter.add_cancelled() was called by a worker.
CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
FINISHED = 'FINISHED'
_FUTURE_STATES = [
PENDING,
RUNNING,
CANCELLED,
CANCELLED_AND_NOTIFIED,
FINISHED
]
_STATE_TO_DESCRIPTION_MAP = {
PENDING: "pending",
RUNNING: "running",
CANCELLED: "cancelled",
CANCELLED_AND_NOTIFIED: "cancelled",
FINISHED: "finished"
}
# Logger for internal use by the futures package.
LOGGER = logging.getLogger("concurrent.futures")
class Error(Exception):
"""Base class for all future-related exceptions."""
pass
class CancelledError(Error):
"""The Future was cancelled."""
pass
class TimeoutError(Error):
"""The operation exceeded the given deadline."""
pass
class _Waiter(object):
"""Provides the event that wait() and as_completed() block on."""
def __init__(self):
self.event = threading.Event()
self.finished_futures = []
def add_result(self, future):
self.finished_futures.append(future)
def add_exception(self, future):
self.finished_futures.append(future)
def add_cancelled(self, future):
self.finished_futures.append(future)
class _AsCompletedWaiter(_Waiter):
"""Used by as_completed()."""
def __init__(self):
super(_AsCompletedWaiter, self).__init__()
self.lock = threading.Lock()
def add_result(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_result(future)
self.event.set()
def add_exception(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_exception(future)
self.event.set()
def add_cancelled(self, future):
with self.lock:
super(_AsCompletedWaiter, self).add_cancelled(future)
self.event.set()
class _FirstCompletedWaiter(_Waiter):
"""Used by wait(return_when=FIRST_COMPLETED)."""
def add_result(self, future):
super(_FirstCompletedWaiter, self).add_result(future)
self.event.set()
def add_exception(self, future):
super(_FirstCompletedWaiter, self).add_exception(future)
self.event.set()
def add_cancelled(self, future):
super(_FirstCompletedWaiter, self).add_cancelled(future)
self.event.set()
class _AllCompletedWaiter(_Waiter):
"""Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""
def __init__(self, num_pending_calls, stop_on_exception):
self.num_pending_calls = num_pending_calls
self.stop_on_exception = stop_on_exception
self.lock = threading.Lock()
super(_AllCompletedWaiter, self).__init__()
def _decrement_pending_calls(self):
with self.lock:
self.num_pending_calls -= 1
if not self.num_pending_calls:
self.event.set()
def add_result(self, future):
super(_AllCompletedWaiter, self).add_result(future)
self._decrement_pending_calls()
def add_exception(self, future):
super(_AllCompletedWaiter, self).add_exception(future)
if self.stop_on_exception:
self.event.set()
else:
self._decrement_pending_calls()
def add_cancelled(self, future):
super(_AllCompletedWaiter, self).add_cancelled(future)
self._decrement_pending_calls()
class _AcquireFutures(object):
"""A context manager that does an ordered acquire of Future conditions."""
def __init__(self, futures):
self.futures = sorted(futures, key=id)
def __enter__(self):
for future in self.futures:
future._condition.acquire()
def __exit__(self, *args):
for future in self.futures:
future._condition.release()
def _create_and_install_waiters(fs, return_when):
if return_when == _AS_COMPLETED:
waiter = _AsCompletedWaiter()
elif return_when == FIRST_COMPLETED:
waiter = _FirstCompletedWaiter()
else:
pending_count = sum(
f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
if return_when == FIRST_EXCEPTION:
waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
elif return_when == ALL_COMPLETED:
waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
else:
raise ValueError("Invalid return condition: %r" % return_when)
for f in fs:
f._waiters.append(waiter)
return waiter
def as_completed(fs, timeout=None):
"""An iterator over the given futures that yields each as it completes.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
iterate over.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
An iterator that yields the given Futures as they complete (finished or
cancelled).
Raises:
TimeoutError: If the entire result iterator could not be generated
before the given timeout.
"""
if timeout is not None:
end_time = timeout + time.time()
with _AcquireFutures(fs):
finished = set(
f for f in fs
if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
pending = set(fs) - finished
waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
try:
for future in finished:
yield future
while pending:
if timeout is None:
wait_timeout = None
else:
wait_timeout = end_time - time.time()
if wait_timeout < 0:
raise TimeoutError(
'%d (of %d) futures unfinished' % (
len(pending), len(fs)))
waiter.event.wait(wait_timeout)
with waiter.lock:
finished = waiter.finished_futures
waiter.finished_futures = []
waiter.event.clear()
for future in finished:
yield future
pending.remove(future)
finally:
for f in fs:
f._waiters.remove(waiter)
DoneAndNotDoneFutures = namedtuple(
'DoneAndNotDoneFutures', 'done not_done')
def wait(fs, timeout=None, return_when=ALL_COMPLETED):
"""Wait for the futures in the given sequence to complete.
Args:
fs: The sequence of Futures (possibly created by different Executors) to
wait upon.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
return_when: Indicates when this function should return. The options
are:
FIRST_COMPLETED - Return when any future finishes or is
cancelled.
FIRST_EXCEPTION - Return when any future finishes by raising an
exception. If no future raises an exception
then it is equivalent to ALL_COMPLETED.
ALL_COMPLETED - Return when all futures finish or are cancelled.
Returns:
A named 2-tuple of sets. The first set, named 'done', contains the
futures that completed (is finished or cancelled) before the wait
completed. The second set, named 'not_done', contains uncompleted
futures.
"""
with _AcquireFutures(fs):
done = set(f for f in fs
if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
not_done = set(fs) - done
if (return_when == FIRST_COMPLETED) and done:
return DoneAndNotDoneFutures(done, not_done)
elif (return_when == FIRST_EXCEPTION) and done:
if any(f for f in done
if not f.cancelled() and f.exception() is not None):
return DoneAndNotDoneFutures(done, not_done)
if len(done) == len(fs):
return DoneAndNotDoneFutures(done, not_done)
waiter = _create_and_install_waiters(fs, return_when)
waiter.event.wait(timeout)
for f in fs:
f._waiters.remove(waiter)
done.update(waiter.finished_futures)
return DoneAndNotDoneFutures(done, set(fs) - done)
class Future(object):
"""Represents the result of an asynchronous computation."""
def __init__(self):
"""Initializes the future. Should not be called by clients."""
self._condition = threading.Condition()
self._state = PENDING
self._result = None
self._exception = None
self._traceback = None
self._waiters = []
self._done_callbacks = []
def _invoke_callbacks(self):
for callback in self._done_callbacks:
try:
callback(self)
except Exception:
LOGGER.exception('exception calling callback for %r', self)
def __repr__(self):
with self._condition:
if self._state == FINISHED:
if self._exception:
return '<Future at %s state=%s raised %s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state],
self._exception.__class__.__name__)
else:
return '<Future at %s state=%s returned %s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state],
self._result.__class__.__name__)
return '<Future at %s state=%s>' % (
hex(id(self)),
_STATE_TO_DESCRIPTION_MAP[self._state])
def cancel(self):
"""Cancel the future if possible.
Returns True if the future was cancelled, False otherwise. A future
cannot be cancelled if it is running or has already completed.
"""
with self._condition:
if self._state in [RUNNING, FINISHED]:
return False
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
return True
self._state = CANCELLED
self._condition.notify_all()
self._invoke_callbacks()
return True
def cancelled(self):
"""Return True if the future has cancelled."""
with self._condition:
return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
def running(self):
"""Return True if the future is currently executing."""
with self._condition:
return self._state == RUNNING
def done(self):
"""Return True of the future was cancelled or finished executing."""
with self._condition:
return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
def __get_result(self):
if self._exception:
reraise(self._exception, self._traceback)
else:
return self._result
def add_done_callback(self, fn):
"""Attaches a callable that will be called when the future finishes.
Args:
fn: A callable that will be called with this future as its only
argument when the future completes or is cancelled. The callable
will always be called by a thread in the same process in which
it was added. If the future has already completed or been
cancelled then the callable will be called immediately. These
callables are called in the order that they were added.
"""
with self._condition:
if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
self._done_callbacks.append(fn)
return
fn(self)
def result(self, timeout=None):
"""Return the result of the call that the future represents.
Args:
timeout: The number of seconds to wait for the result if the future
isn't done. If None, then there is no limit on the wait time.
Returns:
The result of the call that the future represents.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
Exception: If the call raised then that exception will be raised.
"""
with self._condition:
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self.__get_result()
self._condition.wait(timeout)
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self.__get_result()
else:
raise TimeoutError()
def exception_info(self, timeout=None):
"""Return a tuple of (exception, traceback) raised by the call that the
future represents.
Args:
timeout: The number of seconds to wait for the exception if the
future isn't done. If None, then there is no limit on the wait
time.
Returns:
The exception raised by the call that the future represents or None
if the call completed without raising.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
"""
with self._condition:
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self._exception, self._traceback
self._condition.wait(timeout)
if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
raise CancelledError()
elif self._state == FINISHED:
return self._exception, self._traceback
else:
raise TimeoutError()
def exception(self, timeout=None):
"""Return the exception raised by the call that the future represents.
Args:
timeout: The number of seconds to wait for the exception if the
future isn't done. If None, then there is no limit on the wait
time.
Returns:
The exception raised by the call that the future represents or None
if the call completed without raising.
Raises:
CancelledError: If the future was cancelled.
TimeoutError: If the future didn't finish executing before the given
timeout.
"""
return self.exception_info(timeout)[0]
# The following methods should only be used by Executors and in tests.
def set_running_or_notify_cancel(self):
"""Mark the future as running or process any cancel notifications.
Should only be used by Executor implementations and unit tests.
If the future has been cancelled (cancel() was called and returned
True) then any threads waiting on the future completing (through calls
to as_completed() or wait()) are notified and False is returned.
If the future was not cancelled then it is put in the running state
(future calls to running() will return True) and True is returned.
This method should be called by Executor implementations before
executing the work associated with this future. If this method returns
False then the work should not be executed.
Returns:
False if the Future was cancelled, True otherwise.
Raises:
RuntimeError: if this method was already called or if set_result()
or set_exception() was called.
"""
with self._condition:
if self._state == CANCELLED:
self._state = CANCELLED_AND_NOTIFIED
for waiter in self._waiters:
waiter.add_cancelled(self)
# self._condition.notify_all() is not necessary because
# self.cancel() triggers a notification.
return False
elif self._state == PENDING:
self._state = RUNNING
return True
else:
LOGGER.critical('Future %s in unexpected state: %s',
id(self.future),
self.future._state)
raise RuntimeError('Future in unexpected state')
def set_result(self, result):
"""Sets the return value of work associated with the future.
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
self._result = result
self._state = FINISHED
for waiter in self._waiters:
waiter.add_result(self)
self._condition.notify_all()
self._invoke_callbacks()
def set_exception_info(self, exception, traceback):
"""Sets the result of the future as being the given exception
and traceback.
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
self._exception = exception
self._traceback = traceback
self._state = FINISHED
for waiter in self._waiters:
waiter.add_exception(self)
self._condition.notify_all()
self._invoke_callbacks()
def set_exception(self, exception):
"""Sets the result of the future as being the given exception.
Should only be used by Executor implementations and unit tests.
"""
self.set_exception_info(exception, None)
class Executor(object):
"""This is an abstract base class for concrete asynchronous executors."""
def submit(self, fn, *args, **kwargs):
"""Submits a callable to be executed with the given arguments.
Schedules the callable to be executed as fn(*args, **kwargs) and returns
a Future instance representing the execution of the callable.
Returns:
A Future representing the given call.
"""
raise NotImplementedError()
def map(self, fn, *iterables, **kwargs):
"""Returns a iterator equivalent to map(fn, iter).
Args:
fn: A callable that will take as many arguments as there are
passed iterables.
timeout: The maximum number of seconds to wait. If None, then there
is no limit on the wait time.
Returns:
An iterator equivalent to: map(func, *iterables) but the calls may
be evaluated out-of-order.
Raises:
TimeoutError: If the entire result iterator could not be generated
before the given timeout.
Exception: If fn(*args) raises for any values.
"""
timeout = kwargs.get('timeout')
if timeout is not None:
end_time = timeout + time.time()
fs = [self.submit(fn, *args) for args in zip(*iterables)]
try:
for future in fs:
if timeout is None:
yield future.result()
else:
yield future.result(end_time - time.time())
finally:
for future in fs:
future.cancel()
def shutdown(self, wait=True):
"""Clean-up the resources associated with the Executor.
It is safe to call this method several times. Otherwise, no other
methods can be called after this one.
Args:
wait: If True then shutdown will not return until all running
futures have finished executing and the resources used by the
executor have been reclaimed.
"""
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.shutdown(wait=True)
return False
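A minimal usage sketch of this module's wait() API, assuming the bundled ThreadPoolExecutor (defined later in this commit) is what resolves as concurrent.futures on sys.path; the slow() helper and the timings are illustrative only:

import time
from concurrent.futures import ThreadPoolExecutor, wait, FIRST_COMPLETED

def slow(seconds):
    time.sleep(seconds)
    return seconds

with ThreadPoolExecutor(max_workers=2) as pool:
    futures = [pool.submit(slow, s) for s in (0.1, 2, 3)]
    # Block until at least one future finishes, or 1 second passes.
    done, not_done = wait(futures, timeout=1, return_when=FIRST_COMPLETED)
    print('%d done, %d still pending' % (len(done), len(not_done)))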

View File

@ -0,0 +1,111 @@
from keyword import iskeyword as _iskeyword
from operator import itemgetter as _itemgetter
import sys as _sys
def namedtuple(typename, field_names):
"""Returns a new subclass of tuple with named fields.
>>> Point = namedtuple('Point', 'x y')
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
>>> p[0] + p[1] # indexable like a plain tuple
33
>>> x, y = p # unpack like a regular tuple
>>> x, y
(11, 22)
>>> p.x + p.y # fields also accessible by name
33
>>> d = p._asdict() # convert to a dictionary
>>> d['x']
11
>>> Point(**d) # convert from a dictionary
Point(x=11, y=22)
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
Point(x=100, y=22)
"""
# Parse and validate the field names. Validation serves two purposes,
# generating informative error messages and preventing template injection attacks.
if isinstance(field_names, basestring):
field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
field_names = tuple(map(str, field_names))
for name in (typename,) + field_names:
if not all(c.isalnum() or c=='_' for c in name):
raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a keyword: %r' % name)
if name[0].isdigit():
raise ValueError('Type names and field names cannot start with a number: %r' % name)
seen_names = set()
for name in field_names:
if name.startswith('_'):
raise ValueError('Field names cannot start with an underscore: %r' % name)
if name in seen_names:
raise ValueError('Encountered duplicate field name: %r' % name)
seen_names.add(name)
# Create and fill-in the class template
numfields = len(field_names)
argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes
reprtxt = ', '.join('%s=%%r' % name for name in field_names)
dicttxt = ', '.join('%r: t[%d]' % (name, pos) for pos, name in enumerate(field_names))
template = '''class %(typename)s(tuple):
'%(typename)s(%(argtxt)s)' \n
__slots__ = () \n
_fields = %(field_names)r \n
def __new__(_cls, %(argtxt)s):
return _tuple.__new__(_cls, (%(argtxt)s)) \n
@classmethod
def _make(cls, iterable, new=tuple.__new__, len=len):
'Make a new %(typename)s object from a sequence or iterable'
result = new(cls, iterable)
if len(result) != %(numfields)d:
raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
return result \n
def __repr__(self):
return '%(typename)s(%(reprtxt)s)' %% self \n
def _asdict(t):
'Return a new dict which maps field names to their values'
return {%(dicttxt)s} \n
def _replace(_self, **kwds):
'Return a new %(typename)s object replacing specified fields with new values'
result = _self._make(map(kwds.pop, %(field_names)r, _self))
if kwds:
raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
return result \n
def __getnewargs__(self):
return tuple(self) \n\n''' % locals()
for i, name in enumerate(field_names):
template += ' %s = _property(_itemgetter(%d))\n' % (name, i)
# Execute the template string in a temporary namespace and
# support tracing utilities by setting a value for frame.f_globals['__name__']
namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
_property=property, _tuple=tuple)
try:
exec(template, namespace)
except SyntaxError:
e = _sys.exc_info()[1]
raise SyntaxError(e.message + ':\n' + template)
result = namespace[typename]
# For pickling to work, the __module__ variable needs to be set to the frame
# where the named tuple is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example).
if hasattr(_sys, '_getframe'):
result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
return result
if _sys.version_info[0] < 3:
def reraise(exc, traceback):
locals_ = {'exc_type': type(exc), 'exc_value': exc, 'traceback': traceback}
exec('raise exc_type, exc_value, traceback', {}, locals_)
else:
def reraise(exc, traceback):
# Tracebacks are embedded in exceptions in Python 3
raise exc
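A minimal sketch of how reraise() is meant to be used (it is what Future.__get_result above relies on to preserve the original traceback on Python 2), assuming the bundled lib directory is on sys.path; capture() and the KeyError are illustrative assumptions:

import sys
from concurrent.futures._compat import reraise

def capture():
    try:
        {}['missing']
    except KeyError:
        # (exception instance, traceback), as stored by Future.set_exception_info()
        return sys.exc_info()[1:]

exc, tb = capture()
try:
    reraise(exc, tb)  # re-raises KeyError; on Python 2 the original traceback survives
except KeyError:
    print('re-raised later with the captured traceback')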

View File

@ -0,0 +1,363 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Implements ProcessPoolExecutor.
The follow diagram and text describe the data-flow through the system:
|======================= In-process =====================|== Out-of-process ==|
+----------+ +----------+ +--------+ +-----------+ +---------+
| | => | Work Ids | => | | => | Call Q | => | |
| | +----------+ | | +-----------+ | |
| | | ... | | | | ... | | |
| | | 6 | | | | 5, call() | | |
| | | 7 | | | | ... | | |
| Process | | ... | | Local | +-----------+ | Process |
| Pool | +----------+ | Worker | | #1..n |
| Executor | | Thread | | |
| | +----------- + | | +-----------+ | |
| | <=> | Work Items | <=> | | <= | Result Q | <= | |
| | +------------+ | | +-----------+ | |
| | | 6: call() | | | | ... | | |
| | | future | | | | 4, result | | |
| | | ... | | | | 3, except | | |
+----------+ +------------+ +--------+ +-----------+ +---------+
Executor.submit() called:
- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
- adds the id of the _WorkItem to the "Work Ids" queue
Local worker thread:
- reads work ids from the "Work Ids" queue and looks up the corresponding
WorkItem from the "Work Items" dict: if the work item has been cancelled then
it is simply removed from the dict, otherwise it is repackaged as a
_CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
- reads _ResultItems from "Result Q", updates the future stored in the
"Work Items" dict and deletes the dict entry
Process #1..n:
- reads _CallItems from "Call Q", executes the calls, and puts the resulting
_ResultItems in "Request Q"
"""
from __future__ import with_statement
import atexit
import multiprocessing
import threading
import weakref
import sys
from concurrent.futures import _base
try:
import queue
except ImportError:
import Queue as queue
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
# Workers are created as daemon threads and processes. This is done to allow the
# interpreter to exit when there are still idle processes in a
# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
# allowing workers to die with the interpreter has two undesirable properties:
# - The workers would still be running during interpreter shutdown,
# meaning that they would fail in unpredictable ways.
# - The workers could be killed while evaluating a work item, which could
# be bad if the callable being evaluated has external side-effects e.g.
# writing to a file.
#
# To work around this problem, an exit handler is installed which tells the
# workers to exit when their work queues are empty and then waits until the
# threads/processes finish.
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
def _python_exit():
global _shutdown
_shutdown = True
items = list(_threads_queues.items())
for t, q in items:
q.put(None)
for t, q in items:
t.join()
# Controls how many more calls than processes will be queued in the call queue.
# A smaller number will mean that processes spend more time idle waiting for
# work while a larger number will make Future.cancel() succeed less frequently
# (Futures in the call queue cannot be cancelled).
EXTRA_QUEUED_CALLS = 1
class _WorkItem(object):
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
class _ResultItem(object):
def __init__(self, work_id, exception=None, result=None):
self.work_id = work_id
self.exception = exception
self.result = result
class _CallItem(object):
def __init__(self, work_id, fn, args, kwargs):
self.work_id = work_id
self.fn = fn
self.args = args
self.kwargs = kwargs
def _process_worker(call_queue, result_queue):
"""Evaluates calls from call_queue and places the results in result_queue.
This worker is run in a separate process.
Args:
call_queue: A multiprocessing.Queue of _CallItems that will be read and
evaluated by the worker.
result_queue: A multiprocessing.Queue of _ResultItems that will be written
to by the worker.
shutdown: A multiprocessing.Event that will be set as a signal to the
worker that it should exit when call_queue is empty.
"""
while True:
call_item = call_queue.get(block=True)
if call_item is None:
# Wake up queue management thread
result_queue.put(None)
return
try:
r = call_item.fn(*call_item.args, **call_item.kwargs)
except BaseException:
e = sys.exc_info()[1]
result_queue.put(_ResultItem(call_item.work_id,
exception=e))
else:
result_queue.put(_ResultItem(call_item.work_id,
result=r))
def _add_call_item_to_queue(pending_work_items,
work_ids,
call_queue):
"""Fills call_queue with _WorkItems from pending_work_items.
This function never blocks.
Args:
pending_work_items: A dict mapping work ids to _WorkItems e.g.
{5: <_WorkItem...>, 6: <_WorkItem...>, ...}
work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
are consumed and the corresponding _WorkItems from
pending_work_items are transformed into _CallItems and put in
call_queue.
call_queue: A multiprocessing.Queue that will be filled with _CallItems
derived from _WorkItems.
"""
while True:
if call_queue.full():
return
try:
work_id = work_ids.get(block=False)
except queue.Empty:
return
else:
work_item = pending_work_items[work_id]
if work_item.future.set_running_or_notify_cancel():
call_queue.put(_CallItem(work_id,
work_item.fn,
work_item.args,
work_item.kwargs),
block=True)
else:
del pending_work_items[work_id]
continue
def _queue_management_worker(executor_reference,
processes,
pending_work_items,
work_ids_queue,
call_queue,
result_queue):
"""Manages the communication between this process and the worker processes.
This function is run in a local thread.
Args:
executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
this thread. Used to determine if the ProcessPoolExecutor has been
garbage collected and that this function can exit.
processes: A list of the multiprocessing.Process instances used as
workers.
pending_work_items: A dict mapping work ids to _WorkItems e.g.
{5: <_WorkItem...>, 6: <_WorkItem...>, ...}
work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
call_queue: A multiprocessing.Queue that will be filled with _CallItems
derived from _WorkItems for processing by the process workers.
result_queue: A multiprocessing.Queue of _ResultItems generated by the
process workers.
"""
nb_shutdown_processes = [0]
def shutdown_one_process():
"""Tell a worker to terminate, which will in turn wake us again"""
call_queue.put(None)
nb_shutdown_processes[0] += 1
while True:
_add_call_item_to_queue(pending_work_items,
work_ids_queue,
call_queue)
result_item = result_queue.get(block=True)
if result_item is not None:
work_item = pending_work_items[result_item.work_id]
del pending_work_items[result_item.work_id]
if result_item.exception:
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
# - The interpreter is shutting down OR
# - The executor that owns this worker has been collected OR
# - The executor that owns this worker has been shutdown.
if _shutdown or executor is None or executor._shutdown_thread:
# Since no new work items can be added, it is safe to shutdown
# this thread if there are no pending work items.
if not pending_work_items:
while nb_shutdown_processes[0] < len(processes):
shutdown_one_process()
# If .join() is not called on the created processes then
# some multiprocessing.Queue methods may deadlock on Mac OS
# X.
for p in processes:
p.join()
call_queue.close()
return
del executor
_system_limits_checked = False
_system_limited = None
def _check_system_limits():
global _system_limits_checked, _system_limited
if _system_limits_checked:
if _system_limited:
raise NotImplementedError(_system_limited)
_system_limits_checked = True
try:
import os
nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
except (AttributeError, ValueError):
# sysconf not available or setting not available
return
if nsems_max == -1:
# indeterminate limit, assume that the limit is determined
# by available memory only
return
if nsems_max >= 256:
# minimum number of semaphores available
# according to POSIX
return
_system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
raise NotImplementedError(_system_limited)
class ProcessPoolExecutor(_base.Executor):
def __init__(self, max_workers=None):
"""Initializes a new ProcessPoolExecutor instance.
Args:
max_workers: The maximum number of processes that can be used to
execute the given calls. If None or not given then as many
worker processes will be created as the machine has processors.
"""
_check_system_limits()
if max_workers is None:
self._max_workers = multiprocessing.cpu_count()
else:
self._max_workers = max_workers
# Make the call queue slightly larger than the number of processes to
# prevent the worker processes from idling. But don't make it too big
# because futures in the call queue cannot be cancelled.
self._call_queue = multiprocessing.Queue(self._max_workers +
EXTRA_QUEUED_CALLS)
self._result_queue = multiprocessing.Queue()
self._work_ids = queue.Queue()
self._queue_management_thread = None
self._processes = set()
# Shutdown is a two-step process.
self._shutdown_thread = False
self._shutdown_lock = threading.Lock()
self._queue_count = 0
self._pending_work_items = {}
def _start_queue_management_thread(self):
# When the executor gets lost, the weakref callback will wake up
# the queue management thread.
def weakref_cb(_, q=self._result_queue):
q.put(None)
if self._queue_management_thread is None:
self._queue_management_thread = threading.Thread(
target=_queue_management_worker,
args=(weakref.ref(self, weakref_cb),
self._processes,
self._pending_work_items,
self._work_ids,
self._call_queue,
self._result_queue))
self._queue_management_thread.daemon = True
self._queue_management_thread.start()
_threads_queues[self._queue_management_thread] = self._result_queue
def _adjust_process_count(self):
for _ in range(len(self._processes), self._max_workers):
p = multiprocessing.Process(
target=_process_worker,
args=(self._call_queue,
self._result_queue))
p.start()
self._processes.add(p)
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
if self._shutdown_thread:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = _WorkItem(f, fn, args, kwargs)
self._pending_work_items[self._queue_count] = w
self._work_ids.put(self._queue_count)
self._queue_count += 1
# Wake up queue management thread
self._result_queue.put(None)
self._start_queue_management_thread()
self._adjust_process_count()
return f
submit.__doc__ = _base.Executor.submit.__doc__
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown_thread = True
if self._queue_management_thread:
# Wake up queue management thread
self._result_queue.put(None)
if wait:
self._queue_management_thread.join()
# To reduce the risk of opening too many files, remove references to
# objects that use file descriptors.
self._queue_management_thread = None
self._call_queue = None
self._result_queue = None
self._processes = None
shutdown.__doc__ = _base.Executor.shutdown.__doc__
atexit.register(_python_exit)
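A minimal usage sketch of ProcessPoolExecutor.map() under the data flow described in the module docstring above; cube() is an illustrative helper, and the __main__ guard is needed so worker processes can re-import the module:

from concurrent.futures import ProcessPoolExecutor

def cube(x):
    # Must be a top-level function: call items are pickled into the "Call Q".
    return x ** 3

if __name__ == '__main__':
    with ProcessPoolExecutor(max_workers=2) as pool:
        print(list(pool.map(cube, range(6))))  # [0, 1, 8, 27, 64, 125]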

View File

@ -0,0 +1,138 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Implements ThreadPoolExecutor."""
from __future__ import with_statement
import atexit
import threading
import weakref
import sys
from concurrent.futures import _base
try:
import queue
except ImportError:
import Queue as queue
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
# Workers are created as daemon threads. This is done to allow the interpreter
# to exit when there are still idle threads in a ThreadPoolExecutor's thread
# pool (i.e. shutdown() was not called). However, allowing workers to die with
# the interpreter has two undesirable properties:
# - The workers would still be running during interpreter shutdown,
# meaning that they would fail in unpredictable ways.
# - The workers could be killed while evaluating a work item, which could
# be bad if the callable being evaluated has external side-effects e.g.
# writing to a file.
#
# To work around this problem, an exit handler is installed which tells the
# workers to exit when their work queues are empty and then waits until the
# threads finish.
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
def _python_exit():
global _shutdown
_shutdown = True
items = list(_threads_queues.items())
for t, q in items:
q.put(None)
for t, q in items:
t.join()
atexit.register(_python_exit)
class _WorkItem(object):
def __init__(self, future, fn, args, kwargs):
self.future = future
self.fn = fn
self.args = args
self.kwargs = kwargs
def run(self):
if not self.future.set_running_or_notify_cancel():
return
try:
result = self.fn(*self.args, **self.kwargs)
except BaseException:
e, tb = sys.exc_info()[1:]
self.future.set_exception_info(e, tb)
else:
self.future.set_result(result)
def _worker(executor_reference, work_queue):
try:
while True:
work_item = work_queue.get(block=True)
if work_item is not None:
work_item.run()
continue
executor = executor_reference()
# Exit if:
# - The interpreter is shutting down OR
# - The executor that owns the worker has been collected OR
# - The executor that owns the worker has been shutdown.
if _shutdown or executor is None or executor._shutdown:
# Notice other workers
work_queue.put(None)
return
del executor
except BaseException:
_base.LOGGER.critical('Exception in worker', exc_info=True)
class ThreadPoolExecutor(_base.Executor):
def __init__(self, max_workers):
"""Initializes a new ThreadPoolExecutor instance.
Args:
max_workers: The maximum number of threads that can be used to
execute the given calls.
"""
self._max_workers = max_workers
self._work_queue = queue.Queue()
self._threads = set()
self._shutdown = False
self._shutdown_lock = threading.Lock()
def submit(self, fn, *args, **kwargs):
with self._shutdown_lock:
if self._shutdown:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = _WorkItem(f, fn, args, kwargs)
self._work_queue.put(w)
self._adjust_thread_count()
return f
submit.__doc__ = _base.Executor.submit.__doc__
def _adjust_thread_count(self):
# When the executor gets lost, the weakref callback will wake up
# the worker threads.
def weakref_cb(_, q=self._work_queue):
q.put(None)
# TODO(bquinlan): Should avoid creating new threads if there are more
# idle threads than items in the work queue.
if len(self._threads) < self._max_workers:
t = threading.Thread(target=_worker,
args=(weakref.ref(self, weakref_cb),
self._work_queue))
t.daemon = True
t.start()
self._threads.add(t)
_threads_queues[t] = self._work_queue
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown = True
self._work_queue.put(None)
if wait:
for t in self._threads:
t.join()
shutdown.__doc__ = _base.Executor.shutdown.__doc__
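And a minimal sketch of the thread pool with submit()/as_completed(), assuming this backport is the concurrent.futures found on sys.path; square() and the worker count are illustrative only:

from concurrent.futures import ThreadPoolExecutor, as_completed

def square(x):
    return x * x

# max_workers is a required argument in this backport (no default).
with ThreadPoolExecutor(max_workers=4) as pool:
    futures = [pool.submit(square, n) for n in range(8)]
    for future in as_completed(futures):
        print(future.result())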

24
lib/futures/__init__.py Normal file
View File

@ -0,0 +1,24 @@
# Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Execute computations asynchronously using threads or processes."""
import warnings
from concurrent.futures import (FIRST_COMPLETED,
FIRST_EXCEPTION,
ALL_COMPLETED,
CancelledError,
TimeoutError,
Future,
Executor,
wait,
as_completed,
ProcessPoolExecutor,
ThreadPoolExecutor)
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
warnings.warn('The futures package has been deprecated. '
'Use the concurrent.futures package instead.',
DeprecationWarning)

1
lib/futures/process.py Normal file
View File

@ -0,0 +1 @@
from concurrent.futures import ProcessPoolExecutor

1
lib/futures/thread.py Normal file
View File

@ -0,0 +1 @@
from concurrent.futures import ThreadPoolExecutor

View File

@ -25,7 +25,7 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
__all__ = ['IMDb', 'IMDbError', 'Movie', 'Person', 'Character', 'Company',
'available_access_systems']
__version__ = VERSION = '5.0'
__version__ = VERSION = '5.1dev20141116'
# Import compatibility module (importing it is enough).
import _compat

View File

@ -1,78 +0,0 @@
#
# IMDbPY configuration file.
#
# This file can be placed in many locations; the first file found is
# used, _ignoring_ the content of the others.
#
# Place it in one of the following directories (in order of precedence):
#
# - imdbpy.cfg in the current directory.
# - .imdbpy.cfg in the current directory.
# - imdbpy.cfg in the user's home directory.
# - .imdbpy.cfg in the user's home directory.
# - /etc/imdbpy.cfg Unix-like systems only.
# - /etc/conf.d/imdbpy.cfg Unix-like systems only.
# - sys.prefix + imdbpy.cfg for non-Unix (e.g.: C:\Python\etc\imdbpy.cfg)
#
# If this file is not found, 'http' access system is used by default.
#
# Lines starting with #, ; and // are considered comments and ignored.
#
# Some special values are replaced with Python equivalents (case insensitive):
#
# 0, off, false, no -> False
# 1, on, true, yes -> True
# none -> None
#
# Other options, like defaultModFunct, must be passed by the code.
#
[imdbpy]
## Default.
accessSystem = httpThin
## Optional (options common to every data access system):
# Activate adult searches (on, by default).
#adultSearch = on
# Number of results for searches (20 by default).
#results = 20
# Re-raise all caught exceptions (off, by default).
#reraiseExceptions = off
## Optional (options common to http and mobile data access systems):
# Proxy used to access the network. If it requires authentication,
# try with: http://username:password@server_address:port/
#proxy = http://localhost:8080/
# Cookies of the IMDb.com account
#cookie_id = string_representing_the_cookie_id
#cookie_uu = string_representing_the_cookie_uu
## Timeout for the connection to IMDb (30 seconds, by default).
#timeout = 30
# Base url to access pages on the IMDb.com web server.
#imdbURL_base = http://akas.imdb.com/
## Parameters for the 'http' data access system.
# Parser to use; can be a single value or a list of values separated by
# a comma, to express order preference. Valid values: "lxml", "beautifulsoup"
#useModule = lxml,beautifulsoup
## Parameters for the 'mobile' data access system.
#accessSystem = mobile
## Parameters for the 'sql' data access system.
#accessSystem = sql
#uri = mysql://user:password@localhost/imdb
# ORM to use; can be a single value or a list of values separated by
# a comma, to express order preference. Valid values: "sqlobject", "sqlalchemy"
#useORM = sqlobject,sqlalchemy
## Set the threshold for logging messages.
# Can be one of "debug", "info", "warning", "error", "critical" (default:
# "warning").
loggingLevel = debug
## Path to a configuration file for the logging facility;
# see: http://docs.python.org/library/logging.html#configuring-logging
#loggingConfig = ~/.imdbpy-logger.cfg

View File

@ -639,11 +639,14 @@ def analyze_company_name(name, stripNotes=False):
o_name = name
name = name.strip()
country = None
if name.endswith(']'):
idx = name.rfind('[')
if idx != -1:
country = name[idx:]
name = name[:idx].rstrip()
if name.startswith('['):
name = re.sub('[!@#$\(\)\[\]]', '', name)
else:
if name.endswith(']'):
idx = name.rfind('[')
if idx != -1:
country = name[idx:]
name = name[:idx].rstrip()
if not name:
raise IMDbParserError('invalid name: "%s"' % o_name)
result = {'name': name}
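A standalone sketch of the bracket handling shown in this hunk (the sample company name is illustrative; as shown above, the real function also validates the result and raises IMDbParserError when the name ends up empty):

name = 'Pixar Animation Studios [us]'
country = None
if name.endswith(']'):
    idx = name.rfind('[')
    if idx != -1:
        country = name[idx:]         # '[us]'
        name = name[:idx].rstrip()   # 'Pixar Animation Studios'
print('%s %s' % (name, country))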

View File

@ -59,9 +59,14 @@ HAS_SNI = SUBJ_ALT_NAME_SUPPORT
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD,
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
try:
_openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
except AttributeError:
pass
_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
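The hunk above replaces a hard reference to ssl.PROTOCOL_SSLv3 with a guarded lookup, because interpreters built without SSLv3 support do not expose that constant. A minimal sketch of the same feature-detection pattern using only the stdlib ssl module:

import ssl

protocols = {'SSLv23': ssl.PROTOCOL_SSLv23}
try:
    # Absent on Python builds compiled without SSLv3 support.
    protocols['SSLv3'] = ssl.PROTOCOL_SSLv3
except AttributeError:
    pass
print(sorted(protocols))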

View File

@ -1,519 +0,0 @@
# -*- coding: utf-8 -*-
'''Common object storage frontend.'''
import os
import zlib
import urllib
try:
import cPickle as pickle
except ImportError:
import pickle
from collections import deque
try:
# Import store and cache entry points if setuptools installed
import pkg_resources
stores = dict((_store.name, _store) for _store in
pkg_resources.iter_entry_points('shove.stores'))
caches = dict((_cache.name, _cache) for _cache in
pkg_resources.iter_entry_points('shove.caches'))
# Pass if nothing loaded
if not stores and not caches:
raise ImportError()
except ImportError:
# Static store backend registry
stores = dict(
bsddb='shove.store.bsdb:BsdStore',
cassandra='shove.store.cassandra:CassandraStore',
dbm='shove.store.dbm:DbmStore',
durus='shove.store.durusdb:DurusStore',
file='shove.store.file:FileStore',
firebird='shove.store.db:DbStore',
ftp='shove.store.ftp:FtpStore',
hdf5='shove.store.hdf5:HDF5Store',
leveldb='shove.store.leveldbstore:LevelDBStore',
memory='shove.store.memory:MemoryStore',
mssql='shove.store.db:DbStore',
mysql='shove.store.db:DbStore',
oracle='shove.store.db:DbStore',
postgres='shove.store.db:DbStore',
redis='shove.store.redisdb:RedisStore',
s3='shove.store.s3:S3Store',
simple='shove.store.simple:SimpleStore',
sqlite='shove.store.db:DbStore',
svn='shove.store.svn:SvnStore',
zodb='shove.store.zodb:ZodbStore',
)
# Static cache backend registry
caches = dict(
bsddb='shove.cache.bsdb:BsdCache',
file='shove.cache.file:FileCache',
filelru='shove.cache.filelru:FileLRUCache',
firebird='shove.cache.db:DbCache',
memcache='shove.cache.memcached:MemCached',
memlru='shove.cache.memlru:MemoryLRUCache',
memory='shove.cache.memory:MemoryCache',
mssql='shove.cache.db:DbCache',
mysql='shove.cache.db:DbCache',
oracle='shove.cache.db:DbCache',
postgres='shove.cache.db:DbCache',
redis='shove.cache.redisdb:RedisCache',
simple='shove.cache.simple:SimpleCache',
simplelru='shove.cache.simplelru:SimpleLRUCache',
sqlite='shove.cache.db:DbCache',
)
def getbackend(uri, engines, **kw):
'''
Loads the right backend based on a URI.
@param uri Instance or name string
@param engines A dictionary of scheme/class pairs
'''
if isinstance(uri, basestring):
mod = engines[uri.split('://', 1)[0]]
# Load module if setuptools not present
if isinstance(mod, basestring):
# Isolate classname from dot path
module, klass = mod.split(':')
# Load module
mod = getattr(__import__(module, '', '', ['']), klass)
# Load appropriate class from setuptools entry point
else:
mod = mod.load()
# Return instance
return mod(uri, **kw)
# No-op for existing instances
return uri
def synchronized(func):
'''
Decorator to lock and unlock a method (Phillip J. Eby).
@param func Method to decorate
'''
def wrapper(self, *__args, **__kw):
self._lock.acquire()
try:
return func(self, *__args, **__kw)
finally:
self._lock.release()
wrapper.__name__ = func.__name__
wrapper.__dict__ = func.__dict__
wrapper.__doc__ = func.__doc__
return wrapper
class Base(object):
'''Base Mapping class.'''
def __init__(self, engine, **kw):
'''
@keyword compress True, False, or an integer compression level (1-9).
'''
self._compress = kw.get('compress', False)
self._protocol = kw.get('protocol', pickle.HIGHEST_PROTOCOL)
def __getitem__(self, key):
raise NotImplementedError()
def __setitem__(self, key, value):
raise NotImplementedError()
def __delitem__(self, key):
raise NotImplementedError()
def __contains__(self, key):
try:
value = self[key]
except KeyError:
return False
return True
def get(self, key, default=None):
'''
Fetch a given key from the mapping. If the key does not exist,
return the default.
@param key Keyword of item in mapping.
@param default Default value (default: None)
'''
try:
return self[key]
except KeyError:
return default
def dumps(self, value):
'''Optionally serializes and compresses an object.'''
# Pickle the value using the configured protocol
value = pickle.dumps(value, protocol=self._protocol)
if self._compress:
level = 9 if self._compress is True else self._compress
value = zlib.compress(value, level)
return value
def loads(self, value):
'''Deserializes and optionally decompresses an object.'''
if self._compress:
try:
value = zlib.decompress(value)
except zlib.error:
pass
value = pickle.loads(value)
return value
class BaseStore(Base):
'''Base Store class (based on UserDict.DictMixin).'''
def __init__(self, engine, **kw):
super(BaseStore, self).__init__(engine, **kw)
self._store = None
def __cmp__(self, other):
if other is None:
return False
if isinstance(other, BaseStore):
return cmp(dict(self.iteritems()), dict(other.iteritems()))
def __del__(self):
# __init__ didn't succeed, so don't bother closing
if not hasattr(self, '_store'):
return
self.close()
def __iter__(self):
for k in self.keys():
yield k
def __len__(self):
return len(self.keys())
def __repr__(self):
return repr(dict(self.iteritems()))
def close(self):
'''Closes internal store and clears object references.'''
try:
self._store.close()
except AttributeError:
pass
self._store = None
def clear(self):
'''Removes all keys and values from a store.'''
for key in self.keys():
del self[key]
def items(self):
'''Returns a list with all key/value pairs in the store.'''
return list(self.iteritems())
def iteritems(self):
'''Lazily returns all key/value pairs in a store.'''
for k in self:
yield (k, self[k])
def iterkeys(self):
'''Lazily returns all keys in a store.'''
return self.__iter__()
def itervalues(self):
'''Lazily returns all values in a store.'''
for _, v in self.iteritems():
yield v
def keys(self):
'''Returns a list with all keys in a store.'''
raise NotImplementedError()
def pop(self, key, *args):
'''
Removes and returns a value from a store.
@param args Default to return if key not present.
'''
if len(args) > 1:
raise TypeError('pop expected at most 2 arguments, got ' + repr(
1 + len(args))
)
try:
value = self[key]
# Return default if key not in store
except KeyError:
if args:
return args[0]
del self[key]
return value
def popitem(self):
'''Removes and returns a key, value pair from a store.'''
try:
k, v = self.iteritems().next()
except StopIteration:
raise KeyError('Store is empty.')
del self[k]
return (k, v)
def setdefault(self, key, default=None):
'''
Returns the value corresponding to an existing key, or sets the
key to the default and returns the default.
@param default Default value (default: None)
'''
try:
return self[key]
except KeyError:
self[key] = default
return default
def update(self, other=None, **kw):
'''
Adds to or overwrites the values in this store with values from
another store.
@param other Another store
@param kw Additional keys and values to store
'''
if other is None:
pass
elif hasattr(other, 'iteritems'):
for k, v in other.iteritems():
self[k] = v
elif hasattr(other, 'keys'):
for k in other.keys():
self[k] = other[k]
else:
for k, v in other:
self[k] = v
if kw:
self.update(kw)
def values(self):
'''Returns a list with all values in a store.'''
return list(v for _, v in self.iteritems())
class Shove(BaseStore):
'''Common object frontend class.'''
def __init__(self, store='simple://', cache='simple://', **kw):
super(Shove, self).__init__(store, **kw)
# Load store
self._store = getbackend(store, stores, **kw)
# Load cache
self._cache = getbackend(cache, caches, **kw)
# Buffer for lazy writes and the sync-frequency setting
self._buffer, self._sync = dict(), kw.get('sync', 2)
def __getitem__(self, key):
'''Gets an item from shove.'''
try:
return self._cache[key]
except KeyError:
# Synchronize cache and store
self.sync()
value = self._store[key]
self._cache[key] = value
return value
def __setitem__(self, key, value):
'''Sets an item in shove.'''
self._cache[key] = self._buffer[key] = value
# When the buffer reaches self._sync entries, write the buffer to the store
if len(self._buffer) >= self._sync:
self.sync()
def __delitem__(self, key):
'''Deletes an item from shove.'''
try:
del self._cache[key]
except KeyError:
pass
self.sync()
del self._store[key]
def keys(self):
'''Returns a list of keys in shove.'''
self.sync()
return self._store.keys()
def sync(self):
'''Writes buffer to store.'''
for k, v in self._buffer.iteritems():
self._store[k] = v
self._buffer.clear()
def close(self):
'''Finalizes and closes shove.'''
# If close has been called, pass
if self._store is not None:
try:
self.sync()
except AttributeError:
pass
self._store.close()
self._store = self._cache = self._buffer = None
class FileBase(Base):
'''Base class for file based storage.'''
def __init__(self, engine, **kw):
super(FileBase, self).__init__(engine, **kw)
if engine.startswith('file://'):
engine = urllib.url2pathname(engine.split('://')[1])
self._dir = engine
# Create directory
if not os.path.exists(self._dir):
self._createdir()
def __getitem__(self, key):
# (per Larry Meyn)
try:
item = open(self._key_to_file(key), 'rb')
data = item.read()
item.close()
return self.loads(data)
except:
raise KeyError(key)
def __setitem__(self, key, value):
# (per Larry Meyn)
try:
item = open(self._key_to_file(key), 'wb')
item.write(self.dumps(value))
item.close()
except (IOError, OSError):
raise KeyError(key)
def __delitem__(self, key):
try:
os.remove(self._key_to_file(key))
except (IOError, OSError):
raise KeyError(key)
def __contains__(self, key):
return os.path.exists(self._key_to_file(key))
def __len__(self):
return len(os.listdir(self._dir))
def _createdir(self):
'''Creates the store directory.'''
try:
os.makedirs(self._dir)
except OSError:
raise EnvironmentError(
'Cache directory "%s" does not exist and ' \
'could not be created' % self._dir
)
def _key_to_file(self, key):
'''Gives the filesystem path for a key.'''
return os.path.join(self._dir, urllib.quote_plus(key))
def keys(self):
'''Returns a list of keys in the store.'''
return [urllib.unquote_plus(name) for name in os.listdir(self._dir)]
class SimpleBase(Base):
'''Single-process in-memory store base class.'''
def __init__(self, engine, **kw):
super(SimpleBase, self).__init__(engine, **kw)
self._store = dict()
def __getitem__(self, key):
try:
return self._store[key]
except:
raise KeyError(key)
def __setitem__(self, key, value):
self._store[key] = value
def __delitem__(self, key):
try:
del self._store[key]
except:
raise KeyError(key)
def __len__(self):
return len(self._store)
def keys(self):
'''Returns a list of keys in the store.'''
return self._store.keys()
class LRUBase(SimpleBase):
def __init__(self, engine, **kw):
super(LRUBase, self).__init__(engine, **kw)
self._max_entries = kw.get('max_entries', 300)
self._hits = 0
self._misses = 0
self._queue = deque()
self._refcount = dict()
def __getitem__(self, key):
try:
value = super(LRUBase, self).__getitem__(key)
self._hits += 1
except KeyError:
self._misses += 1
raise
self._housekeep(key)
return value
def __setitem__(self, key, value):
super(LRUBase, self).__setitem__(key, value)
self._housekeep(key)
if len(self._store) > self._max_entries:
while len(self._store) > self._max_entries:
k = self._queue.popleft()
self._refcount[k] -= 1
if not self._refcount[k]:
super(LRUBase, self).__delitem__(k)
del self._refcount[k]
def _housekeep(self, key):
self._queue.append(key)
self._refcount[key] = self._refcount.get(key, 0) + 1
if len(self._queue) > self._max_entries * 4:
self._purge_queue()
def _purge_queue(self):
for i in [None] * len(self._queue):
k = self._queue.popleft()
if self._refcount[k] == 1:
self._queue.append(k)
else:
self._refcount[k] -= 1
class DbBase(Base):
'''Database common base class.'''
def __init__(self, engine, **kw):
super(DbBase, self).__init__(engine, **kw)
def __delitem__(self, key):
self._store.delete(self._store.c.key == key).execute()
def __len__(self):
return self._store.count().execute().fetchone()[0]
__all__ = ['Shove']
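For reference, a minimal usage sketch of the Shove frontend removed above (assuming the shove package is still installed; 'shove_store' is an illustrative directory name):
from shove import Shove
# File-backed store with the default in-memory cache; values are pickled and compressed.
store = Shove('file://shove_store', 'simple://', compress=True)
store['key'] = ['some', 'value']
store.sync()  # flush the lazy-write buffer to the file store
print store['key']
store.close()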


@ -1 +0,0 @@
# -*- coding: utf-8 -*-

lib/shove/cache/db.py vendored

@ -1,117 +0,0 @@
# -*- coding: utf-8 -*-
'''
Database object cache.
The shove pseudo-URL used for database object caches is the format used by
SQLAlchemy:
<driver>://<username>:<password>@<host>:<port>/<database>
<driver> is the database engine. The engines currently supported by SQLAlchemy
are sqlite, mysql, postgres, oracle, mssql, and firebird.
<username> is the database account user name
<password> is the database account password
<host> is the database location
<port> is the database port
<database> is the name of the specific database
For more information on specific databases see:
http://www.sqlalchemy.org/docs/dbengine.myt#dbengine_supported
'''
import time
import random
from datetime import datetime
try:
from sqlalchemy import (
MetaData, Table, Column, String, Binary, DateTime, select, update,
insert, delete,
)
from shove import DbBase
except ImportError:
raise ImportError('Requires SQLAlchemy >= 0.4')
__all__ = ['DbCache']
class DbCache(DbBase):
'''database cache backend'''
def __init__(self, engine, **kw):
super(DbCache, self).__init__(engine, **kw)
# Get table name
tablename = kw.get('tablename', 'cache')
# Bind metadata
self._metadata = MetaData(engine)
# Make cache table
self._store = Table(tablename, self._metadata,
Column('key', String(60), primary_key=True, nullable=False),
Column('value', Binary, nullable=False),
Column('expires', DateTime, nullable=False),
)
# Create cache table if it does not exist
if not self._store.exists():
self._store.create()
# Set maximum entries
self._max_entries = kw.get('max_entries', 300)
# Maximum number of entries to cull per call if cache is full
self._maxcull = kw.get('maxcull', 10)
# Set timeout
self.timeout = kw.get('timeout', 300)
def __getitem__(self, key):
row = select(
[self._store.c.value, self._store.c.expires],
self._store.c.key == key
).execute().fetchone()
if row is not None:
# Remove if item expired
if row.expires < datetime.now().replace(microsecond=0):
del self[key]
raise KeyError(key)
return self.loads(str(row.value))
raise KeyError(key)
def __setitem__(self, key, value):
timeout, value, cache = self.timeout, self.dumps(value), self._store
# Cull if too many items
if len(self) >= self._max_entries:
self._cull()
# Generate expiration time
expires = datetime.fromtimestamp(
time.time() + timeout
).replace(microsecond=0)
# Update database if key already present
if key in self:
update(
cache,
cache.c.key == key,
dict(value=value, expires=expires),
).execute()
# Insert new key if key not present
else:
insert(
cache, dict(key=key, value=value, expires=expires)
).execute()
def _cull(self):
'''Remove items in cache to make more room.'''
cache, maxcull = self._store, self._maxcull
# Remove items that have timed out
now = datetime.now().replace(microsecond=0)
delete(cache, cache.c.expires < now).execute()
# Remove any items over the maximum allowed number in the cache
if len(self) >= self._max_entries:
# Upper limit for key query
ul = maxcull * 2
# Get list of keys
keys = [
i[0] for i in select(
[cache.c.key], limit=ul
).execute().fetchall()
]
# Get some keys at random
delkeys = list(random.choice(keys) for i in xrange(maxcull))
delete(cache, cache.c.key.in_(delkeys)).execute()
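For reference, a minimal usage sketch of the DbCache backend removed above (assuming shove and SQLAlchemy are installed; the in-memory SQLite URL matches the one used in shove's own tests):
from shove.cache.db import DbCache
# Cache table in an in-memory SQLite database; entries expire after `timeout` seconds.
cache = DbCache('sqlite:///', max_entries=300, timeout=60)
cache['recent'] = {'page': 1}
print cache['recent']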


@ -1,46 +0,0 @@
# -*- coding: utf-8 -*-
'''
File-based cache
shove's pseudo-URL for file caches follows the form:
file://<path>
Where the path is a URL path to a directory on a local filesystem.
Alternatively, a native pathname to the directory can be passed as the 'engine'
argument.
'''
import time
from shove import FileBase
from shove.cache.simple import SimpleCache
class FileCache(FileBase, SimpleCache):
'''File-based cache backend'''
def __init__(self, engine, **kw):
super(FileCache, self).__init__(engine, **kw)
def __getitem__(self, key):
try:
exp, value = super(FileCache, self).__getitem__(key)
# Remove item if time has expired.
if exp < time.time():
del self[key]
raise KeyError(key)
return value
except:
raise KeyError(key)
def __setitem__(self, key, value):
if len(self) >= self._max_entries:
self._cull()
super(FileCache, self).__setitem__(
key, (time.time() + self.timeout, value)
)
__all__ = ['FileCache']
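For reference, a minimal usage sketch of the FileCache backend removed above (assuming shove is installed; 'cache_dir' is an illustrative path):
from shove.cache.file import FileCache
# Each entry is stored as a file under ./cache_dir; expired entries are purged on access.
cache = FileCache('file://cache_dir', timeout=60)
cache['token'] = 'abc123'
print cache['token']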


@ -1,23 +0,0 @@
# -*- coding: utf-8 -*-
'''
File-based LRU cache
shove's pseudo-URL for file caches follows the form:
file://<path>
Where the path is a URL path to a directory on a local filesystem.
Alternatively, a native pathname to the directory can be passed as the 'engine'
argument.
'''
from shove import FileBase
from shove.cache.simplelru import SimpleLRUCache
class FileCache(FileBase, SimpleLRUCache):
'''File-based LRU cache backend'''
__all__ = ['FileCache']


@ -1,43 +0,0 @@
# -*- coding: utf-8 -*-
'''
"memcached" cache.
The shove pseudo-URL for a memcache cache is:
memcache://<memcache_server>
'''
try:
import memcache
except ImportError:
raise ImportError("Memcache cache requires the 'memcache' library")
from shove import Base
class MemCached(Base):
'''Memcached cache backend'''
def __init__(self, engine, **kw):
super(MemCached, self).__init__(engine, **kw)
if engine.startswith('memcache://'):
engine = engine.split('://')[1]
self._store = memcache.Client(engine.split(';'))
# Set timeout
self.timeout = kw.get('timeout', 300)
def __getitem__(self, key):
value = self._store.get(key)
if value is None:
raise KeyError(key)
return self.loads(value)
def __setitem__(self, key, value):
self._store.set(key, self.dumps(value), self.timeout)
def __delitem__(self, key):
self._store.delete(key)
__all__ = ['MemCached']
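For reference, a minimal usage sketch of the MemCached backend removed above (assuming shove and the memcache library are installed and a memcached daemon is listening on the illustrative localhost:11211):
from shove.cache.memcached import MemCached
# Values are pickled and handed to memcached with a 300 second expiry.
cache = MemCached('memcache://localhost:11211', timeout=300)
cache['session'] = {'user': 'guest'}
print cache['session']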


@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-
'''
Thread-safe in-memory cache using LRU.
The shove pseudo-URL for a memory cache is:
memlru://
'''
import copy
import threading
from shove import synchronized
from shove.cache.simplelru import SimpleLRUCache
class MemoryLRUCache(SimpleLRUCache):
'''Thread-safe in-memory cache backend using LRU.'''
def __init__(self, engine, **kw):
super(MemoryLRUCache, self).__init__(engine, **kw)
self._lock = threading.Condition()
@synchronized
def __setitem__(self, key, value):
super(MemoryLRUCache, self).__setitem__(key, value)
@synchronized
def __getitem__(self, key):
return copy.deepcopy(super(MemoryLRUCache, self).__getitem__(key))
@synchronized
def __delitem__(self, key):
super(MemoryLRUCache, self).__delitem__(key)
__all__ = ['MemoryLRUCache']


@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-
'''
Thread-safe in-memory cache.
The shove pseudo-URL for a memory cache is:
memory://
'''
import copy
import threading
from shove import synchronized
from shove.cache.simple import SimpleCache
class MemoryCache(SimpleCache):
'''Thread-safe in-memory cache backend.'''
def __init__(self, engine, **kw):
super(MemoryCache, self).__init__(engine, **kw)
self._lock = threading.Condition()
@synchronized
def __setitem__(self, key, value):
super(MemoryCache, self).__setitem__(key, value)
@synchronized
def __getitem__(self, key):
return copy.deepcopy(super(MemoryCache, self).__getitem__(key))
@synchronized
def __delitem__(self, key):
super(MemoryCache, self).__delitem__(key)
__all__ = ['MemoryCache']


@ -1,45 +0,0 @@
# -*- coding: utf-8 -*-
'''
Redis-based object cache
The shove pseudo-URL for a redis cache is:
redis://<host>:<port>/<db>
'''
import urlparse
try:
import redis
except ImportError:
raise ImportError('This store requires the redis library')
from shove import Base
class RedisCache(Base):
'''Redis cache backend'''
init = 'redis://'
def __init__(self, engine, **kw):
super(RedisCache, self).__init__(engine, **kw)
spliturl = urlparse.urlsplit(engine)
host, port = spliturl[1].split(':')
db = spliturl[2].replace('/', '')
self._store = redis.Redis(host, int(port), db)
# Set timeout
self.timeout = kw.get('timeout', 300)
def __getitem__(self, key):
return self.loads(self._store[key])
def __setitem__(self, key, value):
self._store.setex(key, self.dumps(value), self.timeout)
def __delitem__(self, key):
self._store.delete(key)
__all__ = ['RedisCache']
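For reference, a minimal usage sketch of the RedisCache backend removed above (assuming shove and the redis library are installed and a redis server is running on the illustrative localhost:6379):
from shove.cache.redisdb import RedisCache
# Values are pickled and stored with SETEX, so redis expires them after `timeout` seconds.
cache = RedisCache('redis://localhost:6379/0', timeout=300)
cache['answer'] = 42
print cache['answer']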


@ -1,68 +0,0 @@
# -*- coding: utf-8 -*-
'''
Single-process in-memory cache.
The shove pseudo-URL for a simple cache is:
simple://
'''
import time
import random
from shove import SimpleBase
class SimpleCache(SimpleBase):
'''Single-process in-memory cache.'''
def __init__(self, engine, **kw):
super(SimpleCache, self).__init__(engine, **kw)
# Get random seed
random.seed()
# Set maximum number of items to cull if over max
self._maxcull = kw.get('maxcull', 10)
# Set max entries
self._max_entries = kw.get('max_entries', 300)
# Set timeout
self.timeout = kw.get('timeout', 300)
def __getitem__(self, key):
exp, value = super(SimpleCache, self).__getitem__(key)
# Delete if item timed out.
if exp < time.time():
super(SimpleCache, self).__delitem__(key)
raise KeyError(key)
return value
def __setitem__(self, key, value):
# Cull values if over max # of entries
if len(self) >= self._max_entries:
self._cull()
# Set expiration time and value
exp = time.time() + self.timeout
super(SimpleCache, self).__setitem__(key, (exp, value))
def _cull(self):
'''Remove items in cache to make room.'''
num, maxcull = 0, self._maxcull
# Cull number of items allowed (set by self._maxcull)
for key in self.keys():
# Remove only maximum # of items allowed by maxcull
if num <= maxcull:
# Remove items if expired
try:
self[key]
except KeyError:
num += 1
else:
break
# Remove any additional items up to max # of items allowed by maxcull
while len(self) >= self._max_entries and num <= maxcull:
# Cull remainder of allowed quota at random
del self[random.choice(self.keys())]
num += 1
__all__ = ['SimpleCache']
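For reference, a minimal usage sketch of the SimpleCache backend removed above (assuming shove is still installed):
from shove.cache.simple import SimpleCache
# Process-local dict cache: at most 300 entries, each valid for 60 seconds.
cache = SimpleCache('simple://', max_entries=300, timeout=60)
cache['status'] = 'ok'
print cache['status']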


@ -1,18 +0,0 @@
# -*- coding: utf-8 -*-
'''
Single-process in-memory LRU cache.
The shove pseudo-URL for a simple cache is:
simplelru://
'''
from shove import LRUBase
class SimpleLRUCache(LRUBase):
'''In-memory cache that purges based on least recently used item.'''
__all__ = ['SimpleLRUCache']


@ -1,48 +0,0 @@
# -*- coding: utf-8 -*-
from urllib import url2pathname
from shove.store.simple import SimpleStore
class ClientStore(SimpleStore):
'''Base class for stores where updates have to be committed.'''
def __init__(self, engine, **kw):
super(ClientStore, self).__init__(engine, **kw)
if engine.startswith(self.init):
self._engine = url2pathname(engine.split('://')[1])
def __getitem__(self, key):
return self.loads(super(ClientStore, self).__getitem__(key))
def __setitem__(self, key, value):
super(ClientStore, self).__setitem__(key, self.dumps(value))
class SyncStore(ClientStore):
'''Base class for stores where updates have to be committed.'''
def __getitem__(self, key):
return self.loads(super(SyncStore, self).__getitem__(key))
def __setitem__(self, key, value):
super(SyncStore, self).__setitem__(key, value)
try:
self.sync()
except AttributeError:
pass
def __delitem__(self, key):
super(SyncStore, self).__delitem__(key)
try:
self.sync()
except AttributeError:
pass
__all__ = [
'bsdb', 'db', 'dbm', 'durusdb', 'file', 'ftp', 'memory', 's3', 'simple',
'svn', 'zodb', 'redisdb', 'hdf5db', 'leveldbstore', 'cassandra',
]


@ -1,48 +0,0 @@
# -*- coding: utf-8 -*-
'''
Berkeley Source Database Store.
shove's pseudo-URL for BSDDB stores follows the form:
bsddb://<path>
Where the path is a URL path to a Berkeley database. Alternatively, the native
pathname to a Berkeley database can be passed as the 'engine' parameter.
'''
try:
import bsddb
except ImportError:
raise ImportError('requires bsddb library')
import threading
from shove import synchronized
from shove.store import SyncStore
class BsdStore(SyncStore):
'''Class for Berkeley Source Database Store.'''
init = 'bsddb://'
def __init__(self, engine, **kw):
super(BsdStore, self).__init__(engine, **kw)
self._store = bsddb.hashopen(self._engine)
self._lock = threading.Condition()
self.sync = self._store.sync
@synchronized
def __getitem__(self, key):
return super(BsdStore, self).__getitem__(key)
@synchronized
def __setitem__(self, key, value):
super(BsdStore, self).__setitem__(key, value)
@synchronized
def __delitem__(self, key):
super(BsdStore, self).__delitem__(key)
__all__ = ['BsdStore']


@ -1,72 +0,0 @@
# -*- coding: utf-8 -*-
'''
Cassandra-based object store
The shove pseudo-URL for a cassandra-based store is:
cassandra://<host>:<port>/<keyspace>/<columnFamily>
'''
import urlparse
try:
import pycassa
except ImportError:
raise ImportError('This store requires the pycassa library')
from shove import BaseStore
class CassandraStore(BaseStore):
'''Cassandra based store'''
init = 'cassandra://'
def __init__(self, engine, **kw):
super(CassandraStore, self).__init__(engine, **kw)
spliturl = urlparse.urlsplit(engine)
_, keyspace, column_family = spliturl[2].split('/')
try:
self._pool = pycassa.connect(keyspace, [spliturl[1]])
self._store = pycassa.ColumnFamily(self._pool, column_family)
except pycassa.InvalidRequestException:
from pycassa.system_manager import SystemManager
system_manager = SystemManager(spliturl[1])
system_manager.create_keyspace(
keyspace,
pycassa.system_manager.SIMPLE_STRATEGY,
{'replication_factor': str(kw.get('replication', 1))}
)
system_manager.create_column_family(keyspace, column_family)
self._pool = pycassa.connect(keyspace, [spliturl[1]])
self._store = pycassa.ColumnFamily(self._pool, column_family)
def __getitem__(self, key):
try:
item = self._store.get(key).get(key)
if item is not None:
return self.loads(item)
raise KeyError(key)
except pycassa.NotFoundException:
raise KeyError(key)
def __setitem__(self, key, value):
self._store.insert(key, dict(key=self.dumps(value)))
def __delitem__(self, key):
# beware eventual consistency
try:
self._store.remove(key)
except pycassa.NotFoundException:
raise KeyError(key)
def clear(self):
# beware eventual consistency
self._store.truncate()
def keys(self):
return list(i[0] for i in self._store.get_range())
__all__ = ['CassandraStore']


@ -1,73 +0,0 @@
# -*- coding: utf-8 -*-
'''
Database object store.
The shove pseudo-URL used for database object stores is the format used by
SQLAlchemy:
<driver>://<username>:<password>@<host>:<port>/<database>
<driver> is the database engine. The engines currently supported by SQLAlchemy
are sqlite, mysql, postgres, oracle, mssql, and firebird.
<username> is the database account user name
<password> is the database account password
<host> is the database location
<port> is the database port
<database> is the name of the specific database
For more information on specific databases see:
http://www.sqlalchemy.org/docs/dbengine.myt#dbengine_supported
'''
try:
from sqlalchemy import MetaData, Table, Column, String, Binary, select
from shove import BaseStore, DbBase
except ImportError, e:
raise ImportError('Error: ' + e + ' Requires SQLAlchemy >= 0.4')
class DbStore(BaseStore, DbBase):
'''Database cache backend.'''
def __init__(self, engine, **kw):
super(DbStore, self).__init__(engine, **kw)
# Get tablename
tablename = kw.get('tablename', 'store')
# Bind metadata
self._metadata = MetaData(engine)
# Make store table
self._store = Table(tablename, self._metadata,
Column('key', String(255), primary_key=True, nullable=False),
Column('value', Binary, nullable=False),
)
# Create store table if it does not exist
if not self._store.exists():
self._store.create()
def __getitem__(self, key):
row = select(
[self._store.c.value], self._store.c.key == key,
).execute().fetchone()
if row is not None:
return self.loads(str(row.value))
raise KeyError(key)
def __setitem__(self, k, v):
v, store = self.dumps(v), self._store
# Update database if key already present
if k in self:
store.update(store.c.key == k).execute(value=v)
# Insert new key if key not present
else:
store.insert().execute(key=k, value=v)
def keys(self):
'''Returns a list of keys in the store.'''
return list(i[0] for i in select(
[self._store.c.key]
).execute().fetchall())
__all__ = ['DbStore']
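For reference, a minimal usage sketch of the DbStore backend removed above (assuming shove and SQLAlchemy are installed; the in-memory SQLite URL matches shove's own tests):
from shove.store.db import DbStore
# Persistent key/value table ('store') in an in-memory SQLite database.
store = DbStore('sqlite://', tablename='store')
store['max'] = 3
print store.keys()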


@ -1,33 +0,0 @@
# -*- coding: utf-8 -*-
'''
DBM Database Store.
shove's pseudo-URL for DBM stores follows the form:
dbm://<path>
Where <path> is a URL path to a DBM database. Alternatively, the native
pathname to a DBM database can be passed as the 'engine' parameter.
'''
import anydbm
from shove.store import SyncStore
class DbmStore(SyncStore):
'''Class for variants of the DBM database.'''
init = 'dbm://'
def __init__(self, engine, **kw):
super(DbmStore, self).__init__(engine, **kw)
self._store = anydbm.open(self._engine, 'c')
try:
self.sync = self._store.sync
except AttributeError:
pass
__all__ = ['DbmStore']


@ -1,43 +0,0 @@
# -*- coding: utf-8 -*-
'''
Durus object database frontend.
shove's pseudo-URL for Durus stores follows the form:
durus://<path>
Where the path is a URL path to a durus FileStorage database. Alternatively, a
native pathname to a durus database can be passed as the 'engine' parameter.
'''
try:
from durus.connection import Connection
from durus.file_storage import FileStorage
except ImportError:
raise ImportError('Requires Durus library')
from shove.store import SyncStore
class DurusStore(SyncStore):
'''Class for Durus object database frontend.'''
init = 'durus://'
def __init__(self, engine, **kw):
super(DurusStore, self).__init__(engine, **kw)
self._db = FileStorage(self._engine)
self._connection = Connection(self._db)
self.sync = self._connection.commit
self._store = self._connection.get_root()
def close(self):
'''Closes all open storage and connections.'''
self.sync()
self._db.close()
super(DurusStore, self).close()
__all__ = ['DurusStore']


@ -1,25 +0,0 @@
# -*- coding: utf-8 -*-
'''
Filesystem-based object store
shove's pseudo-URL for filesystem-based stores follows the form:
file://<path>
Where the path is a URL path to a directory on a local filesystem.
Alternatively, a native pathname to the directory can be passed as the 'engine'
argument.
'''
from shove import BaseStore, FileBase
class FileStore(FileBase, BaseStore):
'''File-based store.'''
def __init__(self, engine, **kw):
super(FileStore, self).__init__(engine, **kw)
__all__ = ['FileStore']


@ -1,88 +0,0 @@
# -*- coding: utf-8 -*-
'''
FTP-accessed stores
shove's URL for FTP accessed stores follows the standard form for FTP URLs
defined in RFC-1738:
ftp://<user>:<password>@<host>:<port>/<url-path>
'''
import urlparse
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from ftplib import FTP, error_perm
from shove import BaseStore
class FtpStore(BaseStore):
def __init__(self, engine, **kw):
super(FtpStore, self).__init__(engine, **kw)
user = kw.get('user', 'anonymous')
password = kw.get('password', '')
spliturl = urlparse.urlsplit(engine)
# Set URL, path, and strip 'ftp://' off
base, path = spliturl[1], spliturl[2] + '/'
if '@' in base:
auth, base = base.split('@')
user, password = auth.split(':')
self._store = FTP(base, user, password)
# Change to remote path if it exists
try:
self._store.cwd(path)
except error_perm:
self._makedir(path)
self._base, self._user, self._password = base, user, password
self._updated, self._keys = True, None
def __getitem__(self, key):
try:
local = StringIO()
# Download item
self._store.retrbinary('RETR %s' % key, local.write)
self._updated = False
return self.loads(local.getvalue())
except:
raise KeyError(key)
def __setitem__(self, key, value):
local = StringIO(self.dumps(value))
self._store.storbinary('STOR %s' % key, local)
self._updated = True
def __delitem__(self, key):
try:
self._store.delete(key)
self._updated = True
except:
raise KeyError(key)
def _makedir(self, path):
'''Makes remote paths on an FTP server.'''
paths = list(reversed([i for i in path.split('/') if i != '']))
while paths:
tpath = paths.pop()
self._store.mkd(tpath)
self._store.cwd(tpath)
def keys(self):
'''Returns a list of keys in a store.'''
if self._updated or self._keys is None:
rlist, nlist = list(), list()
# Remote directory listing
self._store.retrlines('LIST -a', rlist.append)
for rlisting in rlist:
# Split remote file based on whitespace
rfile = rlisting.split()
# Append tuple of remote item type & name
if rfile[-1] not in ('.', '..') and rfile[0].startswith('-'):
nlist.append(rfile[-1])
self._keys = nlist
return self._keys
__all__ = ['FtpStore']
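For reference, a minimal usage sketch of the FtpStore backend removed above (assuming shove is installed; the host, user, password and path below are placeholders and the account needs write access):
from shove.store.ftp import FtpStore
# Keys become files in the remote /shove/ directory; values are pickled.
store = FtpStore('ftp://user:secret@ftp.example.com/shove/')
store['backup'] = {'ok': True}
print store.keys()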


@ -1,34 +0,0 @@
# -*- coding: utf-8 -*-
'''
HDF5 Database Store.
shove's pseudo-URL for HDF5 stores follows the form:
hdf5://<path>/<group>
Where <path> is a URL path to a HDF5 database. Alternatively, the native
pathname to a HDF5 database can be passed as the 'engine' parameter.
<group> is the name of the database.
'''
try:
import h5py
except ImportError:
raise ImportError('This store requires h5py library')
from shove.store import ClientStore
class HDF5Store(ClientStore):
'''HDF5 based store'''
init = 'hdf5://'
def __init__(self, engine, **kw):
super(HDF5Store, self).__init__(engine, **kw)
engine, group = self._engine.rsplit('/')
self._store = h5py.File(engine).require_group(group).attrs
__all__ = ['HDF5Store']


@ -1,47 +0,0 @@
# -*- coding: utf-8 -*-
'''
LevelDB Database Store.
shove's pseudo-URL for LevelDB stores follows the form:
leveldb://<path>
Where <path> is a URL path to a LevelDB database. Alternatively, the native
pathname to a LevelDB database can be passed as the 'engine' parameter.
'''
try:
import leveldb
except ImportError:
raise ImportError('This store requires py-leveldb library')
from shove.store import ClientStore
class LevelDBStore(ClientStore):
'''LevelDB based store'''
init = 'leveldb://'
def __init__(self, engine, **kw):
super(LevelDBStore, self).__init__(engine, **kw)
self._store = leveldb.LevelDB(self._engine)
def __getitem__(self, key):
item = self.loads(self._store.Get(key))
if item is not None:
return item
raise KeyError(key)
def __setitem__(self, key, value):
self._store.Put(key, self.dumps(value))
def __delitem__(self, key):
self._store.Delete(key)
def keys(self):
return list(k for k in self._store.RangeIter(include_value=False))
__all__ = ['LevelDBStore']
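For reference, a minimal usage sketch of the LevelDBStore backend removed above (assuming shove and py-leveldb are installed; 'shove.ldb' is an illustrative path):
from shove.store.leveldbstore import LevelDBStore
# Creates (or reopens) a LevelDB database directory named 'shove.ldb'.
store = LevelDBStore('leveldb://shove.ldb')
store['max'] = 3
print store['max']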


@ -1,38 +0,0 @@
# -*- coding: utf-8 -*-
'''
Thread-safe in-memory store.
The shove pseudo-URL for a memory store is:
memory://
'''
import copy
import threading
from shove import synchronized
from shove.store.simple import SimpleStore
class MemoryStore(SimpleStore):
'''Thread-safe in-memory store.'''
def __init__(self, engine, **kw):
super(MemoryStore, self).__init__(engine, **kw)
self._lock = threading.Condition()
@synchronized
def __getitem__(self, key):
return copy.deepcopy(super(MemoryStore, self).__getitem__(key))
@synchronized
def __setitem__(self, key, value):
super(MemoryStore, self).__setitem__(key, value)
@synchronized
def __delitem__(self, key):
super(MemoryStore, self).__delitem__(key)
__all__ = ['MemoryStore']


@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
'''
Redis-based object store
The shove pseudo-URL for a redis-based store is:
redis://<host>:<port>/<db>
'''
import urlparse
try:
import redis
except ImportError:
raise ImportError('This store requires the redis library')
from shove.store import ClientStore
class RedisStore(ClientStore):
'''Redis based store'''
init = 'redis://'
def __init__(self, engine, **kw):
super(RedisStore, self).__init__(engine, **kw)
spliturl = urlparse.urlsplit(engine)
host, port = spliturl[1].split(':')
db = spliturl[2].replace('/', '')
self._store = redis.Redis(host, int(port), db)
def __contains__(self, key):
return self._store.exists(key)
def clear(self):
self._store.flushdb()
def keys(self):
return self._store.keys()
def setdefault(self, key, default=None):
return self._store.getset(key, default)
def update(self, other=None, **kw):
args = other if other is not None else kw
self._store.mset(args)
__all__ = ['RedisStore']


@ -1,91 +0,0 @@
# -*- coding: utf-8 -*-
'''
S3-accessed stores
shove's pseudo-URL for stores found on Amazon.com's S3 web service follows this
form:
s3://<s3_key>:<s3_secret>@<bucket>
<s3_key> is the Access Key issued by Amazon
<s3_secret> is the Secret Access Key issued by Amazon
<bucket> is the name of the bucket accessed through the S3 service
'''
try:
from boto.s3.connection import S3Connection
from boto.s3.key import Key
except ImportError:
raise ImportError('Requires boto library')
from shove import BaseStore
class S3Store(BaseStore):
def __init__(self, engine=None, **kw):
super(S3Store, self).__init__(engine, **kw)
# key = Access Key, secret=Secret Access Key, bucket=bucket name
key, secret, bucket = kw.get('key'), kw.get('secret'), kw.get('bucket')
if engine is not None:
auth, bucket = engine.split('://')[1].split('@')
key, secret = auth.split(':')
# kw 'secure' = (True or False, use HTTPS)
self._conn = S3Connection(key, secret, kw.get('secure', False))
buckets = self._conn.get_all_buckets()
# Use bucket if it exists
for b in buckets:
if b.name == bucket:
self._store = b
break
# Create bucket if it doesn't exist
else:
self._store = self._conn.create_bucket(bucket)
# Set bucket permission ('private', 'public-read',
# 'public-read-write', 'authenticated-read')
self._store.set_acl(kw.get('acl', 'private'))
# Updated flag used for avoiding network calls
self._updated, self._keys = True, None
def __getitem__(self, key):
rkey = self._store.lookup(key)
if rkey is None:
raise KeyError(key)
# Fetch string
value = self.loads(rkey.get_contents_as_string())
# Flag that the store has not been updated
self._updated = False
return value
def __setitem__(self, key, value):
rkey = Key(self._store)
rkey.key = key
rkey.set_contents_from_string(self.dumps(value))
# Flag that the store has been updated
self._updated = True
def __delitem__(self, key):
try:
self._store.delete_key(key)
# Flag that the store has been updated
self._updated = True
except:
raise KeyError(key)
def keys(self):
'''Returns a list of keys in the store.'''
return list(i[0] for i in self.items())
def items(self):
'''Returns a list of items from the store.'''
if self._updated or self._keys is None:
self._keys = self._store.get_all_keys()
return list((str(k.key), k) for k in self._keys)
def iteritems(self):
'''Lazily returns items from the store.'''
for k in self.items():
yield (k.key, k)
__all__ = ['S3Store']
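For reference, a minimal usage sketch of the S3Store backend removed above (assuming shove and boto are installed; the access key, secret and bucket name are placeholders):
from shove.store.s3 import S3Store
# Keys become objects in the named bucket; values are pickled strings.
store = S3Store('s3://ACCESSKEY:SECRETKEY@my-shove-bucket')
store['config'] = {'version': 1}
print store.keys()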


@ -1,21 +0,0 @@
# -*- coding: utf-8 -*-
'''
Single-process in-memory store.
The shove pseudo-URL for a simple store is:
simple://
'''
from shove import BaseStore, SimpleBase
class SimpleStore(SimpleBase, BaseStore):
'''Single-process in-memory store.'''
def __init__(self, engine, **kw):
super(SimpleStore, self).__init__(engine, **kw)
__all__ = ['SimpleStore']


@ -1,110 +0,0 @@
# -*- coding: utf-8 -*-
'''
subversion managed store.
The shove pseudo-URL used for a subversion store that is password protected is:
svn://<username>:<password>@<path>?url=<url>
or for non-password protected repositories:
svn://<path>?url=<url>
<path> is the local repository copy
<url> is the URL of the subversion repository
'''
import os
import urllib
import threading
try:
import pysvn
except ImportError:
raise ImportError('Requires Python Subversion library')
from shove import BaseStore, synchronized
class SvnStore(BaseStore):
'''Class for subversion store.'''
def __init__(self, engine=None, **kw):
super(SvnStore, self).__init__(engine, **kw)
# Get path, url from keywords if used
path, url = kw.get('path'), kw.get('url')
# Get username, password from keywords if used
user, password = kw.get('user'), kw.get('password')
# Process pseudo-URL if used
if engine is not None:
path, query = engine.split('n://')[1].split('?')
url = query.split('=')[1]
# Check for username, password
if '@' in path:
auth, path = path.split('@')
user, password = auth.split(':')
path = urllib.url2pathname(path)
# Create subversion client
self._client = pysvn.Client()
# Assign username, password
if user is not None:
self._client.set_username(user)
if password is not None:
self._client.set_password(password)
# Verify that store exists in repository
try:
self._client.info2(url)
# Create store in repository if it doesn't exist
except pysvn.ClientError:
self._client.mkdir(url, 'Adding directory')
# Verify that local copy exists
try:
if self._client.info(path) is None:
self._client.checkout(url, path)
# Check it out if it doesn't exist
except pysvn.ClientError:
self._client.checkout(url, path)
self._path, self._url = path, url
# Lock
self._lock = threading.Condition()
@synchronized
def __getitem__(self, key):
try:
return self.loads(self._client.cat(self._key_to_file(key)))
except:
raise KeyError(key)
@synchronized
def __setitem__(self, key, value):
fname = self._key_to_file(key)
# Write value to file
open(fname, 'wb').write(self.dumps(value))
# Add to repository
if key not in self:
self._client.add(fname)
self._client.checkin([fname], 'Adding %s' % fname)
@synchronized
def __delitem__(self, key):
try:
fname = self._key_to_file(key)
self._client.remove(fname)
# Remove deleted value from repository
self._client.checkin([fname], 'Removing %s' % fname)
except:
raise KeyError(key)
def _key_to_file(self, key):
'''Gives the filesystem path for a key.'''
return os.path.join(self._path, urllib.quote_plus(key))
@synchronized
def keys(self):
'''Returns a list of keys in the subversion repository.'''
return list(str(i.name.split('/')[-1]) for i
in self._client.ls(self._path))
__all__ = ['SvnStore']


@ -1,48 +0,0 @@
# -*- coding: utf-8 -*-
'''
Zope Object Database store frontend.
shove's pseudo-URL for ZODB stores follows the form:
zodb:<path>
Where the path is a URL path to a ZODB FileStorage database. Alternatively, a
native pathname to a ZODB database can be passed as the 'engine' argument.
'''
try:
import transaction
from ZODB import FileStorage, DB
except ImportError:
raise ImportError('Requires ZODB library')
from shove.store import SyncStore
class ZodbStore(SyncStore):
'''ZODB store front end.'''
init = 'zodb://'
def __init__(self, engine, **kw):
super(ZodbStore, self).__init__(engine, **kw)
# Handle pseudo-URL
self._storage = FileStorage.FileStorage(self._engine)
self._db = DB(self._storage)
self._connection = self._db.open()
self._store = self._connection.root()
# Keeps the DB in sync by committing transactions
self.sync = transaction.commit
def close(self):
'''Closes all open storage and connections.'''
self.sync()
super(ZodbStore, self).close()
self._connection.close()
self._db.close()
self._storage.close()
__all__ = ['ZodbStore']
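For reference, a minimal usage sketch of the ZodbStore backend removed above (assuming shove and ZODB are installed; 'Data.fs' is an illustrative file name):
from shove.store.zodb import ZodbStore
# Values live in a ZODB FileStorage file; sync() commits the current transaction.
store = ZodbStore('zodb://Data.fs')
store['max'] = 3
store.sync()
store.close()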


@ -1 +0,0 @@
# -*- coding: utf-8 -*-


@ -1,133 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
class TestBsdbStore(unittest.TestCase):
def setUp(self):
from shove import Shove
self.store = Shove('bsddb://test.db', compress=True)
def tearDown(self):
import os
self.store.close()
os.remove('test.db')
def test__getitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__setitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__delitem__(self):
self.store['max'] = 3
del self.store['max']
self.assertEqual('max' in self.store, False)
def test_get(self):
self.store['max'] = 3
self.assertEqual(self.store.get('min'), None)
def test__cmp__(self):
from shove import Shove
tstore = Shove()
self.store['max'] = 3
tstore['max'] = 3
self.assertEqual(self.store, tstore)
def test__len__(self):
self.store['max'] = 3
self.store['min'] = 6
self.assertEqual(len(self.store), 2)
def test_close(self):
self.store.close()
self.assertEqual(self.store, None)
def test_clear(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
self.store.clear()
self.assertEqual(len(self.store), 0)
def test_items(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.items())
self.assertEqual(('min', 6) in slist, True)
def test_iteritems(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iteritems())
self.assertEqual(('min', 6) in slist, True)
def test_iterkeys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iterkeys())
self.assertEqual('min' in slist, True)
def test_itervalues(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.itervalues())
self.assertEqual(6 in slist, True)
def test_pop(self):
self.store['max'] = 3
self.store['min'] = 6
item = self.store.pop('min')
self.assertEqual(item, 6)
def test_popitem(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
item = self.store.popitem()
self.assertEqual(len(item) + len(self.store), 4)
def test_setdefault(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['powl'] = 7
self.store.setdefault('pow', 8)
self.assertEqual(self.store['pow'], 8)
def test_update(self):
from shove import Shove
tstore = Shove()
tstore['max'] = 3
tstore['min'] = 6
tstore['pow'] = 7
self.store['max'] = 2
self.store['min'] = 3
self.store['pow'] = 7
self.store.update(tstore)
self.assertEqual(self.store['min'], 6)
def test_values(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.values()
self.assertEqual(6 in slist, True)
def test_keys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.keys()
self.assertEqual('min' in slist, True)
if __name__ == '__main__':
unittest.main()


@ -1,137 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
class TestCassandraStore(unittest.TestCase):
def setUp(self):
from shove import Shove
from pycassa.system_manager import SystemManager
system_manager = SystemManager('localhost:9160')
try:
system_manager.create_column_family('Foo', 'shove')
except:
pass
self.store = Shove('cassandra://localhost:9160/Foo/shove')
def tearDown(self):
self.store.clear()
self.store.close()
from pycassa.system_manager import SystemManager
system_manager = SystemManager('localhost:9160')
system_manager.drop_column_family('Foo', 'shove')
def test__getitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__setitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__delitem__(self):
self.store['max'] = 3
del self.store['max']
self.assertEqual('max' in self.store, False)
def test_get(self):
self.store['max'] = 3
self.assertEqual(self.store.get('min'), None)
def test__cmp__(self):
from shove import Shove
tstore = Shove()
self.store['max'] = 3
tstore['max'] = 3
self.assertEqual(self.store, tstore)
def test__len__(self):
self.store['max'] = 3
self.store['min'] = 6
self.assertEqual(len(self.store), 2)
# def test_clear(self):
# self.store['max'] = 3
# self.store['min'] = 6
# self.store['pow'] = 7
# self.store.clear()
# self.assertEqual(len(self.store), 0)
def test_items(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.items())
self.assertEqual(('min', 6) in slist, True)
def test_iteritems(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iteritems())
self.assertEqual(('min', 6) in slist, True)
def test_iterkeys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iterkeys())
self.assertEqual('min' in slist, True)
def test_itervalues(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.itervalues())
self.assertEqual(6 in slist, True)
def test_pop(self):
self.store['max'] = 3
self.store['min'] = 6
item = self.store.pop('min')
self.assertEqual(item, 6)
# def test_popitem(self):
# self.store['max'] = 3
# self.store['min'] = 6
# self.store['pow'] = 7
# item = self.store.popitem()
# self.assertEqual(len(item) + len(self.store), 4)
def test_setdefault(self):
self.store['max'] = 3
self.store['min'] = 6
# self.store['pow'] = 7
self.store.setdefault('pow', 8)
self.assertEqual(self.store.setdefault('pow', 8), 8)
self.assertEqual(self.store['pow'], 8)
def test_update(self):
from shove import Shove
tstore = Shove()
tstore['max'] = 3
tstore['min'] = 6
tstore['pow'] = 7
self.store['max'] = 2
self.store['min'] = 3
self.store['pow'] = 7
self.store.update(tstore)
self.assertEqual(self.store['min'], 6)
def test_values(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.values()
self.assertEqual(6 in slist, True)
def test_keys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.keys()
self.assertEqual('min' in slist, True)
if __name__ == '__main__':
unittest.main()


@ -1,54 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
class TestDbCache(unittest.TestCase):
initstring = 'sqlite:///'
def setUp(self):
from shove.cache.db import DbCache
self.cache = DbCache(self.initstring)
def tearDown(self):
self.cache = None
def test_getitem(self):
self.cache['test'] = 'test'
self.assertEqual(self.cache['test'], 'test')
def test_setitem(self):
self.cache['test'] = 'test'
self.assertEqual(self.cache['test'], 'test')
def test_delitem(self):
self.cache['test'] = 'test'
del self.cache['test']
self.assertEqual('test' in self.cache, False)
def test_get(self):
self.assertEqual(self.cache.get('min'), None)
def test_timeout(self):
import time
from shove.cache.db import DbCache
cache = DbCache(self.initstring, timeout=1)
cache['test'] = 'test'
time.sleep(2)
def tmp():
cache['test']
self.assertRaises(KeyError, tmp)
def test_cull(self):
from shove.cache.db import DbCache
cache = DbCache(self.initstring, max_entries=1)
cache['test'] = 'test'
cache['test2'] = 'test'
cache['test2'] = 'test'
self.assertEquals(len(cache), 1)
if __name__ == '__main__':
unittest.main()


@ -1,131 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
class TestDbStore(unittest.TestCase):
def setUp(self):
from shove import Shove
self.store = Shove('sqlite://', compress=True)
def tearDown(self):
self.store.close()
def test__getitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__setitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__delitem__(self):
self.store['max'] = 3
del self.store['max']
self.assertEqual('max' in self.store, False)
def test_get(self):
self.store['max'] = 3
self.assertEqual(self.store.get('min'), None)
def test__cmp__(self):
from shove import Shove
tstore = Shove()
self.store['max'] = 3
tstore['max'] = 3
self.assertEqual(self.store, tstore)
def test__len__(self):
self.store['max'] = 3
self.store['min'] = 6
self.assertEqual(len(self.store), 2)
def test_close(self):
self.store.close()
self.assertEqual(self.store, None)
def test_clear(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
self.store.clear()
self.assertEqual(len(self.store), 0)
def test_items(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.items())
self.assertEqual(('min', 6) in slist, True)
def test_iteritems(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iteritems())
self.assertEqual(('min', 6) in slist, True)
def test_iterkeys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iterkeys())
self.assertEqual('min' in slist, True)
def test_itervalues(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.itervalues())
self.assertEqual(6 in slist, True)
def test_pop(self):
self.store['max'] = 3
self.store['min'] = 6
item = self.store.pop('min')
self.assertEqual(item, 6)
def test_popitem(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
item = self.store.popitem()
self.assertEqual(len(item) + len(self.store), 4)
def test_setdefault(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['powl'] = 7
self.store.setdefault('pow', 8)
self.assertEqual(self.store['pow'], 8)
def test_update(self):
from shove import Shove
tstore = Shove()
tstore['max'] = 3
tstore['min'] = 6
tstore['pow'] = 7
self.store['max'] = 2
self.store['min'] = 3
self.store['pow'] = 7
self.store.update(tstore)
self.assertEqual(self.store['min'], 6)
def test_values(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.values()
self.assertEqual(6 in slist, True)
def test_keys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.keys()
self.assertEqual('min' in slist, True)
if __name__ == '__main__':
unittest.main()


@ -1,136 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
class TestDbmStore(unittest.TestCase):
def setUp(self):
from shove import Shove
self.store = Shove('dbm://test.dbm', compress=True)
def tearDown(self):
import os
self.store.close()
try:
os.remove('test.dbm.db')
except OSError:
pass
def test__getitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__setitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__delitem__(self):
self.store['max'] = 3
del self.store['max']
self.assertEqual('max' in self.store, False)
def test_get(self):
self.store['max'] = 3
self.assertEqual(self.store.get('min'), None)
def test__cmp__(self):
from shove import Shove
tstore = Shove()
self.store['max'] = 3
tstore['max'] = 3
self.assertEqual(self.store, tstore)
def test__len__(self):
self.store['max'] = 3
self.store['min'] = 6
self.assertEqual(len(self.store), 2)
def test_close(self):
self.store.close()
self.assertEqual(self.store, None)
def test_clear(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
self.store.clear()
self.assertEqual(len(self.store), 0)
def test_items(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.items())
self.assertEqual(('min', 6) in slist, True)
def test_iteritems(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iteritems())
self.assertEqual(('min', 6) in slist, True)
def test_iterkeys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iterkeys())
self.assertEqual('min' in slist, True)
def test_itervalues(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.itervalues())
self.assertEqual(6 in slist, True)
def test_pop(self):
self.store['max'] = 3
self.store['min'] = 6
item = self.store.pop('min')
self.assertEqual(item, 6)
def test_popitem(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
item = self.store.popitem()
self.assertEqual(len(item) + len(self.store), 4)
def test_setdefault(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
self.store.setdefault('how', 8)
self.assertEqual(self.store['how'], 8)
def test_update(self):
from shove import Shove
tstore = Shove()
tstore['max'] = 3
tstore['min'] = 6
tstore['pow'] = 7
self.store['max'] = 2
self.store['min'] = 3
self.store['pow'] = 7
self.store.update(tstore)
self.assertEqual(self.store['min'], 6)
def test_values(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.values()
self.assertEqual(6 in slist, True)
def test_keys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.keys()
self.assertEqual('min' in slist, True)
if __name__ == '__main__':
unittest.main()


@ -1,133 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
class TestDurusStore(unittest.TestCase):
def setUp(self):
from shove import Shove
self.store = Shove('durus://test.durus', compress=True)
def tearDown(self):
import os
self.store.close()
os.remove('test.durus')
def test__getitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__setitem__(self):
self.store['max'] = 3
self.assertEqual(self.store['max'], 3)
def test__delitem__(self):
self.store['max'] = 3
del self.store['max']
self.assertEqual('max' in self.store, False)
def test_get(self):
self.store['max'] = 3
self.assertEqual(self.store.get('min'), None)
def test__cmp__(self):
from shove import Shove
tstore = Shove()
self.store['max'] = 3
tstore['max'] = 3
self.assertEqual(self.store, tstore)
def test__len__(self):
self.store['max'] = 3
self.store['min'] = 6
self.assertEqual(len(self.store), 2)
def test_close(self):
self.store.close()
self.assertEqual(self.store, None)
def test_clear(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
self.store.clear()
self.assertEqual(len(self.store), 0)
def test_items(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.items())
self.assertEqual(('min', 6) in slist, True)
def test_iteritems(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iteritems())
self.assertEqual(('min', 6) in slist, True)
def test_iterkeys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.iterkeys())
self.assertEqual('min' in slist, True)
def test_itervalues(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = list(self.store.itervalues())
self.assertEqual(6 in slist, True)
def test_pop(self):
self.store['max'] = 3
self.store['min'] = 6
item = self.store.pop('min')
self.assertEqual(item, 6)
def test_popitem(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
item = self.store.popitem()
self.assertEqual(len(item) + len(self.store), 4)
def test_setdefault(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['powl'] = 7
self.store.setdefault('pow', 8)
self.assertEqual(self.store['pow'], 8)
def test_update(self):
from shove import Shove
tstore = Shove()
tstore['max'] = 3
tstore['min'] = 6
tstore['pow'] = 7
self.store['max'] = 2
self.store['min'] = 3
self.store['pow'] = 7
self.store.update(tstore)
self.assertEqual(self.store['min'], 6)
def test_values(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.values()
self.assertEqual(6 in slist, True)
def test_keys(self):
self.store['max'] = 3
self.store['min'] = 6
self.store['pow'] = 7
slist = self.store.keys()
self.assertEqual('min' in slist, True)
if __name__ == '__main__':
unittest.main()


@ -1,58 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestFileCache(unittest.TestCase):
    initstring = 'file://test'
    def setUp(self):
        from shove.cache.file import FileCache
        self.cache = FileCache(self.initstring)
    def tearDown(self):
        import os
        self.cache = None
        for x in os.listdir('test'):
            os.remove(os.path.join('test', x))
        os.rmdir('test')
    def test_getitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_setitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_delitem(self):
        self.cache['test'] = 'test'
        del self.cache['test']
        self.assertEqual('test' in self.cache, False)
    def test_get(self):
        self.assertEqual(self.cache.get('min'), None)
    def test_timeout(self):
        import time
        from shove.cache.file import FileCache
        cache = FileCache(self.initstring, timeout=1)
        cache['test'] = 'test'
        time.sleep(2)
        def tmp():
            cache['test']
        self.assertRaises(KeyError, tmp)
    def test_cull(self):
        from shove.cache.file import FileCache
        cache = FileCache(self.initstring, max_entries=1)
        cache['test'] = 'test'
        cache['test2'] = 'test'
        num = len(cache)
        self.assertEquals(num, 1)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,140 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestFileStore(unittest.TestCase):
    def setUp(self):
        from shove import Shove
        self.store = Shove('file://test', compress=True)
    def tearDown(self):
        import os
        self.store.close()
        for x in os.listdir('test'):
            os.remove(os.path.join('test', x))
        os.rmdir('test')
    def test__getitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__setitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__delitem__(self):
        self.store['max'] = 3
        del self.store['max']
        self.assertEqual('max' in self.store, False)
    def test_get(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store.get('min'), None)
    def test__cmp__(self):
        from shove import Shove
        tstore = Shove()
        self.store['max'] = 3
        tstore['max'] = 3
        self.store.sync()
        tstore.sync()
        self.assertEqual(self.store, tstore)
    def test__len__(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.assertEqual(len(self.store), 2)
    def test_close(self):
        self.store.close()
        self.assertEqual(self.store, None)
    def test_clear(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.clear()
        self.assertEqual(len(self.store), 0)
    def test_items(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.items())
        self.assertEqual(('min', 6) in slist, True)
    def test_iteritems(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iteritems())
        self.assertEqual(('min', 6) in slist, True)
    def test_iterkeys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iterkeys())
        self.assertEqual('min' in slist, True)
    def test_itervalues(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.itervalues())
        self.assertEqual(6 in slist, True)
    def test_pop(self):
        self.store['max'] = 3
        self.store['min'] = 6
        item = self.store.pop('min')
        self.assertEqual(item, 6)
    def test_popitem(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        item = self.store.popitem()
        self.assertEqual(len(item) + len(self.store), 4)
    def test_setdefault(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['powl'] = 7
        self.store.setdefault('pow', 8)
        self.assertEqual(self.store['pow'], 8)
    def test_update(self):
        from shove import Shove
        tstore = Shove()
        tstore['max'] = 3
        tstore['min'] = 6
        tstore['pow'] = 7
        self.store['max'] = 2
        self.store['min'] = 3
        self.store['pow'] = 7
        self.store.update(tstore)
        self.assertEqual(self.store['min'], 6)
    def test_values(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.values()
        self.assertEqual(6 in slist, True)
    def test_keys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.keys()
        self.assertEqual('min' in slist, True)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,149 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestFtpStore(unittest.TestCase):
    ftpstring = 'put ftp string here'
    def setUp(self):
        from shove import Shove
        self.store = Shove(self.ftpstring, compress=True)
    def tearDown(self):
        self.store.clear()
        self.store.close()
    def test__getitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__setitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__delitem__(self):
        self.store['max'] = 3
        del self.store['max']
        self.assertEqual('max' in self.store, False)
    def test_get(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store.get('min'), None)
    def test__cmp__(self):
        from shove import Shove
        tstore = Shove()
        self.store['max'] = 3
        tstore['max'] = 3
        self.store.sync()
        tstore.sync()
        self.assertEqual(self.store, tstore)
    def test__len__(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store.sync()
        self.assertEqual(len(self.store), 2)
    def test_clear(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        self.store.clear()
        self.assertEqual(len(self.store), 0)
    def test_items(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = list(self.store.items())
        self.assertEqual(('min', 6) in slist, True)
    def test_iteritems(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = list(self.store.iteritems())
        self.assertEqual(('min', 6) in slist, True)
    def test_iterkeys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = list(self.store.iterkeys())
        self.assertEqual('min' in slist, True)
    def test_itervalues(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = list(self.store.itervalues())
        self.assertEqual(6 in slist, True)
    def test_pop(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store.sync()
        item = self.store.pop('min')
        self.assertEqual(item, 6)
    def test_popitem(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        item = self.store.popitem()
        self.store.sync()
        self.assertEqual(len(item) + len(self.store), 4)
    def test_setdefault(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['powl'] = 7
        self.store.setdefault('pow', 8)
        self.store.sync()
        self.assertEqual(self.store['pow'], 8)
    def test_update(self):
        from shove import Shove
        tstore = Shove()
        tstore['max'] = 3
        tstore['min'] = 6
        tstore['pow'] = 7
        self.store['max'] = 2
        self.store['min'] = 3
        self.store['pow'] = 7
        self.store.sync()
        self.store.update(tstore)
        self.store.sync()
        self.assertEqual(self.store['min'], 6)
    def test_values(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = self.store.values()
        self.assertEqual(6 in slist, True)
    def test_keys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = self.store.keys()
        self.assertEqual('min' in slist, True)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,135 +0,0 @@
# -*- coding: utf-8 -*-
import unittest2


class TestHDF5Store(unittest2.TestCase):
    def setUp(self):
        from shove import Shove
        self.store = Shove('hdf5://test.hdf5/test')
    def tearDown(self):
        import os
        self.store.close()
        try:
            os.remove('test.hdf5')
        except OSError:
            pass
    def test__getitem__(self):
        self.store['max'] = 3
        self.assertEqual(self.store['max'], 3)
    def test__setitem__(self):
        self.store['max'] = 3
        self.assertEqual(self.store['max'], 3)
    def test__delitem__(self):
        self.store['max'] = 3
        del self.store['max']
        self.assertEqual('max' in self.store, False)
    def test_get(self):
        self.store['max'] = 3
        self.assertEqual(self.store.get('min'), None)
    def test__cmp__(self):
        from shove import Shove
        tstore = Shove()
        self.store['max'] = 3
        tstore['max'] = 3
        self.assertEqual(self.store, tstore)
    def test__len__(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.assertEqual(len(self.store), 2)
    def test_close(self):
        self.store.close()
        self.assertEqual(self.store, None)
    def test_clear(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.clear()
        self.assertEqual(len(self.store), 0)
    def test_items(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.items())
        self.assertEqual(('min', 6) in slist, True)
    def test_iteritems(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iteritems())
        self.assertEqual(('min', 6) in slist, True)
    def test_iterkeys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iterkeys())
        self.assertEqual('min' in slist, True)
    def test_itervalues(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.itervalues())
        self.assertEqual(6 in slist, True)
    def test_pop(self):
        self.store['max'] = 3
        self.store['min'] = 6
        item = self.store.pop('min')
        self.assertEqual(item, 6)
    def test_popitem(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        item = self.store.popitem()
        self.assertEqual(len(item) + len(self.store), 4)
    def test_setdefault(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.setdefault('bow', 8)
        self.assertEqual(self.store['bow'], 8)
    def test_update(self):
        from shove import Shove
        tstore = Shove()
        tstore['max'] = 3
        tstore['min'] = 6
        tstore['pow'] = 7
        self.store['max'] = 2
        self.store['min'] = 3
        self.store['pow'] = 7
        self.store.update(tstore)
        self.assertEqual(self.store['min'], 6)
    def test_values(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.values()
        self.assertEqual(6 in slist, True)
    def test_keys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.keys()
        self.assertEqual('min' in slist, True)


if __name__ == '__main__':
    unittest2.main()

View File

@ -1,132 +0,0 @@
# -*- coding: utf-8 -*-
import unittest2


class TestLevelDBStore(unittest2.TestCase):
    def setUp(self):
        from shove import Shove
        self.store = Shove('leveldb://test', compress=True)
    def tearDown(self):
        import shutil
        shutil.rmtree('test')
    def test__getitem__(self):
        self.store['max'] = 3
        self.assertEqual(self.store['max'], 3)
    def test__setitem__(self):
        self.store['max'] = 3
        self.assertEqual(self.store['max'], 3)
    def test__delitem__(self):
        self.store['max'] = 3
        del self.store['max']
        self.assertEqual('max' in self.store, False)
    def test_get(self):
        self.store['max'] = 3
        self.assertEqual(self.store.get('min'), None)
    def test__cmp__(self):
        from shove import Shove
        tstore = Shove()
        self.store['max'] = 3
        tstore['max'] = 3
        self.assertEqual(self.store, tstore)
    def test__len__(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.assertEqual(len(self.store), 2)
    def test_close(self):
        self.store.close()
        self.assertEqual(self.store, None)
    def test_clear(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.clear()
        self.assertEqual(len(self.store), 0)
    def test_items(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.items())
        self.assertEqual(('min', 6) in slist, True)
    def test_iteritems(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iteritems())
        self.assertEqual(('min', 6) in slist, True)
    def test_iterkeys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iterkeys())
        self.assertEqual('min' in slist, True)
    def test_itervalues(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.itervalues())
        self.assertEqual(6 in slist, True)
    def test_pop(self):
        self.store['max'] = 3
        self.store['min'] = 6
        item = self.store.pop('min')
        self.assertEqual(item, 6)
    def test_popitem(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        item = self.store.popitem()
        self.assertEqual(len(item) + len(self.store), 4)
    def test_setdefault(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.setdefault('bow', 8)
        self.assertEqual(self.store['bow'], 8)
    def test_update(self):
        from shove import Shove
        tstore = Shove()
        tstore['max'] = 3
        tstore['min'] = 6
        tstore['pow'] = 7
        self.store['max'] = 2
        self.store['min'] = 3
        self.store['pow'] = 7
        self.store.update(tstore)
        self.assertEqual(self.store['min'], 6)
    def test_values(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.values()
        self.assertEqual(6 in slist, True)
    def test_keys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.keys()
        self.assertEqual('min' in slist, True)


if __name__ == '__main__':
    unittest2.main()

View File

@ -1,46 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestMemcached(unittest.TestCase):
    initstring = 'memcache://localhost:11211'
    def setUp(self):
        from shove.cache.memcached import MemCached
        self.cache = MemCached(self.initstring)
    def tearDown(self):
        self.cache = None
    def test_getitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_setitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_delitem(self):
        self.cache['test'] = 'test'
        del self.cache['test']
        self.assertEqual('test' in self.cache, False)
    def test_get(self):
        self.assertEqual(self.cache.get('min'), None)
    def test_timeout(self):
        import time
        from shove.cache.memcached import MemCached
        cache = MemCached(self.initstring, timeout=1)
        cache['test'] = 'test'
        time.sleep(1)
        def tmp():
            cache['test']
        self.assertRaises(KeyError, tmp)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,54 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestMemoryCache(unittest.TestCase):
    initstring = 'memory://'
    def setUp(self):
        from shove.cache.memory import MemoryCache
        self.cache = MemoryCache(self.initstring)
    def tearDown(self):
        self.cache = None
    def test_getitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_setitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_delitem(self):
        self.cache['test'] = 'test'
        del self.cache['test']
        self.assertEqual('test' in self.cache, False)
    def test_get(self):
        self.assertEqual(self.cache.get('min'), None)
    def test_timeout(self):
        import time
        from shove.cache.memory import MemoryCache
        cache = MemoryCache(self.initstring, timeout=1)
        cache['test'] = 'test'
        time.sleep(1)
        def tmp():
            cache['test']
        self.assertRaises(KeyError, tmp)
    def test_cull(self):
        from shove.cache.memory import MemoryCache
        cache = MemoryCache(self.initstring, max_entries=1)
        cache['test'] = 'test'
        cache['test2'] = 'test'
        cache['test2'] = 'test'
        self.assertEquals(len(cache), 1)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,135 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestMemoryStore(unittest.TestCase):
    def setUp(self):
        from shove import Shove
        self.store = Shove('memory://', compress=True)
    def tearDown(self):
        self.store.close()
    def test__getitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__setitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__delitem__(self):
        self.store['max'] = 3
        del self.store['max']
        self.assertEqual('max' in self.store, False)
    def test_get(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store.get('min'), None)
    def test__cmp__(self):
        from shove import Shove
        tstore = Shove()
        self.store['max'] = 3
        tstore['max'] = 3
        self.store.sync()
        tstore.sync()
        self.assertEqual(self.store, tstore)
    def test__len__(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.assertEqual(len(self.store), 2)
    def test_close(self):
        self.store.close()
        self.assertEqual(self.store, None)
    def test_clear(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.clear()
        self.assertEqual(len(self.store), 0)
    def test_items(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.items())
        self.assertEqual(('min', 6) in slist, True)
    def test_iteritems(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iteritems())
        self.assertEqual(('min', 6) in slist, True)
    def test_iterkeys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iterkeys())
        self.assertEqual('min' in slist, True)
    def test_itervalues(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.itervalues())
        self.assertEqual(6 in slist, True)
    def test_pop(self):
        self.store['max'] = 3
        self.store['min'] = 6
        item = self.store.pop('min')
        self.assertEqual(item, 6)
    def test_popitem(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        item = self.store.popitem()
        self.assertEqual(len(item) + len(self.store), 4)
    def test_setdefault(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['powl'] = 7
        self.store.setdefault('pow', 8)
        self.assertEqual(self.store['pow'], 8)
    def test_update(self):
        from shove import Shove
        tstore = Shove()
        tstore['max'] = 3
        tstore['min'] = 6
        tstore['pow'] = 7
        self.store['max'] = 2
        self.store['min'] = 3
        self.store['pow'] = 7
        self.store.update(tstore)
        self.assertEqual(self.store['min'], 6)
    def test_values(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.values()
        self.assertEqual(6 in slist, True)
    def test_keys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.keys()
        self.assertEqual('min' in slist, True)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,45 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestRedisCache(unittest.TestCase):
    initstring = 'redis://localhost:6379/0'
    def setUp(self):
        from shove.cache.redisdb import RedisCache
        self.cache = RedisCache(self.initstring)
    def tearDown(self):
        self.cache = None
    def test_getitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_setitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_delitem(self):
        self.cache['test'] = 'test'
        del self.cache['test']
        self.assertEqual('test' in self.cache, False)
    def test_get(self):
        self.assertEqual(self.cache.get('min'), None)
    def test_timeout(self):
        import time
        from shove.cache.redisdb import RedisCache
        cache = RedisCache(self.initstring, timeout=1)
        cache['test'] = 'test'
        time.sleep(3)
        def tmp(): #@IgnorePep8
            return cache['test']
        self.assertRaises(KeyError, tmp)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,128 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestRedisStore(unittest.TestCase):
    def setUp(self):
        from shove import Shove
        self.store = Shove('redis://localhost:6379/0')
    def tearDown(self):
        self.store.clear()
        self.store.close()
    def test__getitem__(self):
        self.store['max'] = 3
        self.assertEqual(self.store['max'], 3)
    def test__setitem__(self):
        self.store['max'] = 3
        self.assertEqual(self.store['max'], 3)
    def test__delitem__(self):
        self.store['max'] = 3
        del self.store['max']
        self.assertEqual('max' in self.store, False)
    def test_get(self):
        self.store['max'] = 3
        self.assertEqual(self.store.get('min'), None)
    def test__cmp__(self):
        from shove import Shove
        tstore = Shove()
        self.store['max'] = 3
        tstore['max'] = 3
        self.assertEqual(self.store, tstore)
    def test__len__(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.assertEqual(len(self.store), 2)
    def test_clear(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.clear()
        self.assertEqual(len(self.store), 0)
    def test_items(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.items())
        self.assertEqual(('min', 6) in slist, True)
    def test_iteritems(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iteritems())
        self.assertEqual(('min', 6) in slist, True)
    def test_iterkeys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iterkeys())
        self.assertEqual('min' in slist, True)
    def test_itervalues(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.itervalues())
        self.assertEqual(6 in slist, True)
    def test_pop(self):
        self.store['max'] = 3
        self.store['min'] = 6
        item = self.store.pop('min')
        self.assertEqual(item, 6)
    def test_popitem(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        item = self.store.popitem()
        self.assertEqual(len(item) + len(self.store), 4)
    def test_setdefault(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['powl'] = 7
        self.store.setdefault('pow', 8)
        self.assertEqual(self.store.setdefault('pow', 8), 8)
        self.assertEqual(self.store['pow'], 8)
    def test_update(self):
        from shove import Shove
        tstore = Shove()
        tstore['max'] = 3
        tstore['min'] = 6
        tstore['pow'] = 7
        self.store['max'] = 2
        self.store['min'] = 3
        self.store['pow'] = 7
        self.store.update(tstore)
        self.assertEqual(self.store['min'], 6)
    def test_values(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.values()
        self.assertEqual(6 in slist, True)
    def test_keys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.keys()
        self.assertEqual('min' in slist, True)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,149 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestS3Store(unittest.TestCase):
    s3string = 's3 test string here'
    def setUp(self):
        from shove import Shove
        self.store = Shove(self.s3string, compress=True)
    def tearDown(self):
        self.store.clear()
        self.store.close()
    def test__getitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__setitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__delitem__(self):
        self.store['max'] = 3
        del self.store['max']
        self.assertEqual('max' in self.store, False)
    def test_get(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store.get('min'), None)
    def test__cmp__(self):
        from shove import Shove
        tstore = Shove()
        self.store['max'] = 3
        tstore['max'] = 3
        self.store.sync()
        tstore.sync()
        self.assertEqual(self.store, tstore)
    def test__len__(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store.sync()
        self.assertEqual(len(self.store), 2)
    def test_clear(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        self.store.clear()
        self.assertEqual(len(self.store), 0)
    def test_items(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = list(self.store.items())
        self.assertEqual(('min', 6) in slist, True)
    def test_iteritems(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = list(self.store.iteritems())
        self.assertEqual(('min', 6) in slist, True)
    def test_iterkeys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = list(self.store.iterkeys())
        self.assertEqual('min' in slist, True)
    def test_itervalues(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = list(self.store.itervalues())
        self.assertEqual(6 in slist, True)
    def test_pop(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store.sync()
        item = self.store.pop('min')
        self.assertEqual(item, 6)
    def test_popitem(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        item = self.store.popitem()
        self.store.sync()
        self.assertEqual(len(item) + len(self.store), 4)
    def test_setdefault(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['powl'] = 7
        self.store.setdefault('pow', 8)
        self.store.sync()
        self.assertEqual(self.store['pow'], 8)
    def test_update(self):
        from shove import Shove
        tstore = Shove()
        tstore['max'] = 3
        tstore['min'] = 6
        tstore['pow'] = 7
        self.store['max'] = 2
        self.store['min'] = 3
        self.store['pow'] = 7
        self.store.sync()
        self.store.update(tstore)
        self.store.sync()
        self.assertEqual(self.store['min'], 6)
    def test_values(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = self.store.values()
        self.assertEqual(6 in slist, True)
    def test_keys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.sync()
        slist = self.store.keys()
        self.assertEqual('min' in slist, True)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,54 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestSimpleCache(unittest.TestCase):
    initstring = 'simple://'
    def setUp(self):
        from shove.cache.simple import SimpleCache
        self.cache = SimpleCache(self.initstring)
    def tearDown(self):
        self.cache = None
    def test_getitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_setitem(self):
        self.cache['test'] = 'test'
        self.assertEqual(self.cache['test'], 'test')
    def test_delitem(self):
        self.cache['test'] = 'test'
        del self.cache['test']
        self.assertEqual('test' in self.cache, False)
    def test_get(self):
        self.assertEqual(self.cache.get('min'), None)
    def test_timeout(self):
        import time
        from shove.cache.simple import SimpleCache
        cache = SimpleCache(self.initstring, timeout=1)
        cache['test'] = 'test'
        time.sleep(1)
        def tmp():
            cache['test']
        self.assertRaises(KeyError, tmp)
    def test_cull(self):
        from shove.cache.simple import SimpleCache
        cache = SimpleCache(self.initstring, max_entries=1)
        cache['test'] = 'test'
        cache['test2'] = 'test'
        cache['test2'] = 'test'
        self.assertEquals(len(cache), 1)


if __name__ == '__main__':
    unittest.main()

View File

@ -1,135 +0,0 @@
# -*- coding: utf-8 -*-
import unittest


class TestSimpleStore(unittest.TestCase):
    def setUp(self):
        from shove import Shove
        self.store = Shove('simple://', compress=True)
    def tearDown(self):
        self.store.close()
    def test__getitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__setitem__(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store['max'], 3)
    def test__delitem__(self):
        self.store['max'] = 3
        del self.store['max']
        self.assertEqual('max' in self.store, False)
    def test_get(self):
        self.store['max'] = 3
        self.store.sync()
        self.assertEqual(self.store.get('min'), None)
    def test__cmp__(self):
        from shove import Shove
        tstore = Shove()
        self.store['max'] = 3
        tstore['max'] = 3
        self.store.sync()
        tstore.sync()
        self.assertEqual(self.store, tstore)
    def test__len__(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.assertEqual(len(self.store), 2)
    def test_close(self):
        self.store.close()
        self.assertEqual(self.store, None)
    def test_clear(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        self.store.clear()
        self.assertEqual(len(self.store), 0)
    def test_items(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.items())
        self.assertEqual(('min', 6) in slist, True)
    def test_iteritems(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iteritems())
        self.assertEqual(('min', 6) in slist, True)
    def test_iterkeys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.iterkeys())
        self.assertEqual('min' in slist, True)
    def test_itervalues(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = list(self.store.itervalues())
        self.assertEqual(6 in slist, True)
    def test_pop(self):
        self.store['max'] = 3
        self.store['min'] = 6
        item = self.store.pop('min')
        self.assertEqual(item, 6)
    def test_popitem(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        item = self.store.popitem()
        self.assertEqual(len(item) + len(self.store), 4)
    def test_setdefault(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['powl'] = 7
        self.store.setdefault('pow', 8)
        self.assertEqual(self.store['pow'], 8)
    def test_update(self):
        from shove import Shove
        tstore = Shove()
        tstore['max'] = 3
        tstore['min'] = 6
        tstore['pow'] = 7
        self.store['max'] = 2
        self.store['min'] = 3
        self.store['pow'] = 7
        self.store.update(tstore)
        self.assertEqual(self.store['min'], 6)
    def test_values(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.values()
        self.assertEqual(6 in slist, True)
    def test_keys(self):
        self.store['max'] = 3
        self.store['min'] = 6
        self.store['pow'] = 7
        slist = self.store.keys()
        self.assertEqual('min' in slist, True)


if __name__ == '__main__':
    unittest.main()

Some files were not shown because too many files have changed in this diff.