Merge pull request #150 from Prinz23/feature/ChangeShowNetworkSettingHandler
Change how the "local/network" setting is handled to address some issues...
Commit: 15fff14355
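
In short, network_timezones.parse_date_time() now always returns an aware datetime in the show's network timezone, and the new sbdatetime.convert_to_setting() helper applies the user's "local/network" display preference only at the point of display. A minimal sketch of the call pattern introduced by this commit, with hypothetical values for the airdate ordinal, air time and network:

    # Sketch only: parse in the network's timezone, then convert according to the
    # TIMEZONE_DISPLAY setting ('local' or 'network') before formatting for output.
    from sickbeard import network_timezones, sbdatetime

    airdate_ordinal = 735500                  # hypothetical ordinal airdate
    airs, network = 'Monday 9:00 PM', 'CBS'   # hypothetical show data

    aware_dt = network_timezones.parse_date_time(airdate_ordinal, airs, network)
    display_dt = sbdatetime.sbdatetime.convert_to_setting(aware_dt)
    print(sbdatetime.sbdatetime.sbfdate(display_dt))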
@@ -54,6 +54,7 @@
 * Fix post processing when using tvrage indexer and mediabrowser metadata generation
 * Change reporting failed network_timezones.txt updates from an error to a warning
 * Fix missing header and "on <missing text>" when network is none and Layout "Poster" with Sort By "Network" on coming episodes page.
+* Change how the "local/network" setting is handled to address some issues
 
 [develop changelog]
 * Change improve display of progress bars in the Downloads columns of the show list page
@@ -466,7 +466,7 @@
 </td>
 
 <td class="col-airdate">
-<span class="${fuzzydate}">#if int($epResult["airdate"]) == 1 then "never" else $sbdatetime.sbdatetime.sbfdate($network_timezones.parse_date_time($epResult["airdate"],$show.airs,$show.network))#</span>
+<span class="${fuzzydate}">#if int($epResult['airdate']) == 1 then 'never' else $sbdatetime.sbdatetime.sbfdate($sbdatetime.sbdatetime.convert_to_setting($network_timezones.parse_date_time($epResult['airdate'],$show.airs,$show.network)))#</span>
 </td>
 
 #if $sickbeard.USE_SUBTITLES and $show.subtitles:
@@ -329,7 +329,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
 
 #set $data_date = '6000000000.0'
 #if $cur_airs_next:
-#set $data_date = $time.mktime($network_timezones.parse_date_time($cur_airs_next,$curShow.airs,$curShow.network).timetuple())
+#set $data_date = $time.mktime($sbdatetime.sbdatetime.convert_to_setting($network_timezones.parse_date_time($cur_airs_next,$curShow.airs,$curShow.network)).timetuple())
 #else if None is not $display_status
 #if 'nded' not in $display_status and 1 == int($curShow.paused)
 #set $data_date = '5000000500.0'
@@ -374,7 +374,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
 
 <div class="show-date">
 #if $cur_airs_next
-#set $ldatetime = $network_timezones.parse_date_time($cur_airs_next,$curShow.airs,$curShow.network)
+#set $ldatetime = $sbdatetime.sbdatetime.convert_to_setting($network_timezones.parse_date_time($cur_airs_next,$curShow.airs,$curShow.network))
 <span class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdate($ldatetime)</span>
 #else
 #set $output_html = '?'
@@ -526,7 +526,7 @@ $myShowList.sort(lambda x, y: cmp(x.name, y.name))
 <tr>
 
 #if $cur_airs_next
-#set $ldatetime = $network_timezones.parse_date_time($cur_airs_next,$curShow.airs,$curShow.network)
+#set $ldatetime = $sbdatetime.sbdatetime.convert_to_setting($network_timezones.parse_date_time($cur_airs_next,$curShow.airs,$curShow.network))
 <td align="center" class="nowrap"><div class="${fuzzydate}">$sbdatetime.sbdatetime.sbfdate($ldatetime)</div><span class="sort_data">$time.mktime($ldatetime.timetuple())</span></td>
 #else:
 <td align="center" class="nowrap"></td>
@@ -2,14 +2,14 @@
 #import datetime
 #from sickbeard.common import *
 #from sickbeard import sbdatetime, network_timezones
-#set global $title="Backlog Overview"
-#set global $header="Backlog Overview"
+#set global $title = 'Backlog Overview'
+#set global $header = 'Backlog Overview'
 
-#set global $sbPath=".."
+#set global $sbPath = '..'
 
-#set global $topmenu="manage"#
+#set global $topmenu = 'manage'#
 #import os.path
-#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_top.tmpl")
+#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')
 
 <script type="text/javascript">
 <!--
@@ -18,7 +18,7 @@
 \$('#pickShow').change(function(){
 var id = \$(this).val();
 if (id) {
-\$('html,body').animate({scrollTop: \$("#show-"+id).offset().top -25},'slow');
+\$('html,body').animate({scrollTop: \$('#show-' + id).offset().top -25},'slow');
 }
 });
 
@@ -90,7 +90,7 @@ Jump to Show
 <tr class="seasoncols"><th>Episode</th><th>Name</th><th class="nowrap">Airdate</th></tr>
 
 #for $curResult in $showSQLResults[$curShow.indexerid]:
-#set $whichStr = $str($curResult["season"]) + "x" + $str($curResult["episode"])
+#set $whichStr = $str($curResult['season']) + 'x' + $str($curResult['episode'])
 #try:
 #set $overview = $showCats[$curShow.indexerid][$whichStr]
 #except Exception
@@ -104,7 +104,7 @@ Jump to Show
 <tr class="seasonstyle $Overview.overviewStrings[$showCats[$curShow.indexerid][$whichStr]]">
 <td class="tableleft" align="center">$whichStr</td>
 <td>$curResult["name"]</td>
-<td class="tableright" align="center" class="nowrap"><div class="${fuzzydate}">#if int($curResult["airdate"]) == 1 then "never" else $sbdatetime.sbdatetime.sbfdate($network_timezones.parse_date_time($curResult["airdate"],$curShow.airs,$curShow.network))#</div></td>
+<td class="tableright" align="center" class="nowrap"><div class="${fuzzydate}">#if int($curResult['airdate']) == 1 then 'never' else $sbdatetime.sbdatetime.sbfdate($sbdatetime.sbdatetime.convert_to_setting($network_timezones.parse_date_time($curResult['airdate'],$curShow.airs,$curShow.network)))#</div></td>
 </tr>
 #end for
 
@@ -113,4 +113,4 @@ Jump to Show
 </table>
 </div>
 
-#include $os.path.join($sickbeard.PROG_DIR,"gui/slick/interfaces/default/inc_bottom.tmpl")
+#include $os.path.join($sickbeard.PROG_DIR,'gui/slick/interfaces/default/inc_bottom.tmpl')
@@ -28,6 +28,7 @@ from sickbeard import db
 from sickbeard import common
 from sickbeard import helpers
 from sickbeard import exceptions
+from sickbeard import network_timezones
 from sickbeard.exceptions import ex
 
 
@@ -42,7 +43,15 @@ class DailySearcher():
 
         logger.log(u"Searching for new released episodes ...")
 
-        curDate = datetime.date.today().toordinal()
+        if not network_timezones.network_dict:
+            network_timezones.update_network_dict()
+
+        if network_timezones.network_dict:
+            curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
+        else:
+            curDate = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()
+
+        curTime = datetime.datetime.now(network_timezones.sb_timezone)
 
         myDB = db.DBConnection()
         sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
@@ -61,7 +70,16 @@ class DailySearcher():
                 continue
 
             except exceptions.MultipleShowObjectsException:
-                logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
+                logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
+                continue
+
+            try:
+                end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
+                # filter out any episodes that haven't aried yet
+                if end_time > curTime:
+                    continue
+            except:
+                # if an error occured assume the episode hasn't aired yet
                 continue
 
             ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
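
As a side note on the daily searcher change above: with network timezone data available, the search window is widened to tomorrow's ordinal date and each episode is then filtered on its actual end-of-airing time; without timezone data it falls back to a cutoff two days in the past. A hedged, standalone sketch of that airing check, with stand-in values for the episode's airdate ordinal, air time, network and runtime:

    # Illustrative sketch of the airing check; the episode values are stand-ins.
    import datetime
    from sickbeard import network_timezones, helpers

    airdate_ordinal, airs, network, runtime = 735500, 'Monday 9:00 PM', 'CBS', 60

    now = datetime.datetime.now(network_timezones.sb_timezone)
    end_time = (network_timezones.parse_date_time(airdate_ordinal, airs, network)
                + datetime.timedelta(minutes=helpers.tryInt(runtime, 60)))

    # the episode only counts as released once its air time plus runtime has passed
    has_aired = end_time <= now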
@@ -16,7 +16,6 @@
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
 
-import sickbeard
 from lib.dateutil import tz
 import lib.dateutil.zoneinfo
 from sickbeard import db
@@ -26,18 +25,18 @@ from sickbeard import encodingKludge as ek
 from os.path import basename, join, isfile
 import os
 import re
-import time
 import datetime
 
 # regex to parse time (12/24 hour format)
-time_regex = re.compile(r"(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b", flags=re.IGNORECASE)
-am_regex = re.compile(r"(A[. ]? ?M)", flags=re.IGNORECASE)
-pm_regex = re.compile(r"(P[. ]? ?M)", flags=re.IGNORECASE)
+time_regex = re.compile(r'(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b', flags=re.IGNORECASE)
+am_regex = re.compile(r'(A[. ]? ?M)', flags=re.IGNORECASE)
+pm_regex = re.compile(r'(P[. ]? ?M)', flags=re.IGNORECASE)
 
 network_dict = None
 
 sb_timezone = tz.tzlocal()
 
 
 # helper to remove failed temp download
 def _remove_zoneinfo_failed(filename):
     try:
@@ -48,7 +47,7 @@ def _remove_zoneinfo_failed(filename):
 
 # helper to remove old unneeded zoneinfo files
 def _remove_old_zoneinfo():
-    if (lib.dateutil.zoneinfo.ZONEINFOFILE is not None):
+    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
         cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
     else:
         return
@@ -63,9 +62,9 @@ def _remove_old_zoneinfo():
                 if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                     try:
                         ek.ek(os.remove, file_w_path)
-                        logger.log(u"Delete unneeded old zoneinfo File: " + file_w_path)
+                        logger.log(u'Delete unneeded old zoneinfo File: %s' % file_w_path)
                     except:
-                        logger.log(u"Unable to delete: " + file_w_path, logger.ERROR)
+                        logger.log(u'Unable to delete: %s' % file_w_path, logger.ERROR)
 
 
 # update the dateutil zoneinfo
@@ -79,48 +78,49 @@ def _update_zoneinfo():
     url_data = helpers.getURL(url_zv)
     if url_data is None:
         # When urlData is None, trouble connecting to github
-        logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR)
+        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
+                   logger.WARNING)
         return
 
-    if (lib.dateutil.zoneinfo.ZONEINFOFILE is not None):
+    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
         cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
     else:
         cur_zoneinfo = None
     (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')
 
-    if ((cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo)):
+    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
         return
 
     # now load the new zoneinfo
-    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/' + new_zoneinfo
+    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo
 
     zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
-    zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)
+    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)
 
-    if (ek.ek(os.path.exists, zonefile_tmp)):
+    if ek.ek(os.path.exists, zonefile_tmp):
         try:
             ek.ek(os.remove, zonefile_tmp)
         except:
-            logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
+            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
             return
 
     if not helpers.download_file(url_tar, zonefile_tmp):
         return
 
     if not ek.ek(os.path.exists, zonefile_tmp):
-        logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
+        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
         return
 
     new_hash = str(helpers.md5_for_file(zonefile_tmp))
 
-    if (zoneinfo_md5.upper() == new_hash.upper()):
-        logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
+    if zoneinfo_md5.upper() == new_hash.upper():
+        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE)
         try:
             # remove the old zoneinfo file
-            if (cur_zoneinfo is not None):
+            if cur_zoneinfo is not None:
                 old_file = helpers.real_path(
                     ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
-                if (ek.ek(os.path.exists, old_file)):
+                if ek.ek(os.path.exists, old_file):
                     ek.ek(os.remove, old_file)
             # rename downloaded file
             ek.ek(os.rename, zonefile_tmp, zonefile)
@@ -132,7 +132,7 @@ def _update_zoneinfo():
             return
     else:
         _remove_zoneinfo_failed(zonefile_tmp)
-        logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR)
+        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
         return
 
 
@@ -149,7 +149,7 @@ def update_network_dict():
     url_data = helpers.getURL(url)
     if url_data is None:
         # When urlData is None, trouble connecting to github
-        logger.log(u"Updating Network Timezones failed, this can happen from time to time. URL: " + url, logger.WARNING)
+        logger.log(u'Updating network timezones failed, this can happen from time to time. URL: %s' % url, logger.WARNING)
         load_network_dict()
         return
 
@@ -162,10 +162,10 @@ def update_network_dict():
     except (IOError, OSError):
         pass
 
-    myDB = db.DBConnection('cache.db')
+    my_db = db.DBConnection('cache.db')
 
     # load current network timezones
-    old_d = dict(myDB.select("SELECT * FROM network_timezones"))
+    old_d = dict(my_db.select('SELECT * FROM network_timezones'))
 
     # list of sql commands to update the network_timezones table
     cl = []
@@ -174,33 +174,32 @@ def update_network_dict():
         if h_k and cur_t != old_d[cur_d]:
             # update old record
             cl.append(
-                ["UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?", [cur_d, cur_t, cur_d]])
+                ['UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?', [cur_d, cur_t, cur_d]])
         elif not h_k:
             # add new record
-            cl.append(["INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)", [cur_d, cur_t]])
+            cl.append(['INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)', [cur_d, cur_t]])
         if h_k:
             del old_d[cur_d]
 
     # remove deleted records
     if len(old_d) > 0:
-        L = list(va for va in old_d)
-        cl.append(["DELETE FROM network_timezones WHERE network_name IN (" + ','.join(['?'] * len(L)) + ")", L])
+        old_items = list(va for va in old_d)
+        cl.append(['DELETE FROM network_timezones WHERE network_name IN (%s)' % ','.join(['?'] * len(old_items)), old_items])
 
     # change all network timezone infos at once (much faster)
     if len(cl) > 0:
-        myDB.mass_action(cl)
+        my_db.mass_action(cl)
         load_network_dict()
 
 
 # load network timezones from db into dict
 def load_network_dict():
-    d = {}
     try:
-        myDB = db.DBConnection('cache.db')
-        cur_network_list = myDB.select("SELECT * FROM network_timezones")
+        my_db = db.DBConnection('cache.db')
+        cur_network_list = my_db.select('SELECT * FROM network_timezones')
         if cur_network_list is None or len(cur_network_list) < 1:
             update_network_dict()
-            cur_network_list = myDB.select("SELECT * FROM network_timezones")
+            cur_network_list = my_db.select('SELECT * FROM network_timezones')
         d = dict(cur_network_list)
     except:
         d = {}
@@ -266,14 +265,12 @@ def parse_date_time(d, t, network):
 
     te = datetime.datetime.fromordinal(helpers.tryInt(d))
     try:
-        if sickbeard.TIMEZONE_DISPLAY == 'local':
-            foreign_timezone = get_network_timezone(network, network_dict)
-            foreign_naive = datetime.datetime(te.year, te.month, te.day, hr, m, tzinfo=foreign_timezone)
-            return foreign_naive.astimezone(sb_timezone)
-        else:
-            return datetime.datetime(te.year, te.month, te.day, hr, m, tzinfo=sb_timezone)
+        foreign_timezone = get_network_timezone(network, network_dict)
+        foreign_naive = datetime.datetime(te.year, te.month, te.day, hr, m, tzinfo=foreign_timezone)
+        return foreign_naive
     except:
-        return datetime.datetime(te.year, te.month, te.day, hr, m)
+        return datetime.datetime(te.year, te.month, te.day, hr, m, tzinfo=sb_timezone)
 
 
 def test_timeformat(t):
     mo = time_regex.search(t)
@@ -21,6 +21,7 @@ import locale
 import functools
 
 import sickbeard
+from sickbeard.network_timezones import sb_timezone
 
 date_presets = ('%Y-%m-%d',
                 '%a, %Y-%m-%d',
@@ -100,13 +101,31 @@ class static_or_instance(object):
 # subclass datetime.datetime to add function to display custom date and time formats
 class sbdatetime(datetime.datetime):
     has_locale = True
-    ORIG_LC_TIME = locale.LC_TIME
+
+    @static_or_instance
+    def convert_to_setting(self, dt=None):
+        try:
+            if sickbeard.TIMEZONE_DISPLAY == 'local':
+                if self is None:
+                    return dt.astimezone(sb_timezone)
+                else:
+                    return self.astimezone(sb_timezone)
+            else:
+                if self is None:
+                    return dt
+                else:
+                    return self
+        except:
+            if self is None:
+                return dt
+            else:
+                return self
 
     # display Time in SickRage Format
     @static_or_instance
     def sbftime(self, dt=None, show_seconds=False, t_preset=None):
 
-        try:locale.setlocale(locale.LC_TIME, self.ORIG_LC_TIME)
+        try:locale.setlocale(locale.LC_TIME, '')
         except:pass
 
         try:
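
A usage note on the convert_to_setting hunk above: because it is wrapped in @static_or_instance, the helper can be called on the class with an explicit datetime (the style used throughout this commit) or on an sbdatetime instance, in which case the instance itself is converted. A small illustrative sketch, assuming sb_timezone has been initialised:

    # Illustrative only; the example date below is made up.
    import datetime
    from sickbeard import sbdatetime
    from sickbeard.network_timezones import sb_timezone

    # class-style call: convert an arbitrary aware datetime
    aware = datetime.datetime(2014, 7, 1, 21, 0, tzinfo=sb_timezone)
    shown = sbdatetime.sbdatetime.convert_to_setting(aware)

    # instance-style call: an sbdatetime value converts itself
    value = sbdatetime.sbdatetime(2014, 7, 1, 21, 0, tzinfo=sb_timezone)
    shown_too = value.convert_to_setting()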
@@ -135,7 +154,7 @@ class sbdatetime(datetime.datetime):
         finally:
             try:
                 if sbdatetime.has_locale:
-                    locale.setlocale(locale.LC_TIME, self.ORIG_LC_TIME)
+                    locale.setlocale(locale.LC_TIME, '')
             except:
                 sbdatetime.has_locale = False
 
@@ -146,7 +165,7 @@ class sbdatetime(datetime.datetime):
     def sbfdate(self, dt=None, d_preset=None):
 
         try:
-            locale.setlocale(locale.LC_TIME, self.ORIG_LC_TIME)
+            locale.setlocale(locale.LC_TIME, '')
         except:
             pass
 
@@ -166,7 +185,7 @@ class sbdatetime(datetime.datetime):
         finally:
 
             try:
-                locale.setlocale(locale.LC_TIME, self.ORIG_LC_TIME)
+                locale.setlocale(locale.LC_TIME, '')
             except:
                 pass
 
@@ -177,7 +196,7 @@ class sbdatetime(datetime.datetime):
     def sbfdatetime(self, dt=None, show_seconds=False, d_preset=None, t_preset=None):
 
         try:
-            locale.setlocale(locale.LC_TIME, self.ORIG_LC_TIME)
+            locale.setlocale(locale.LC_TIME, '')
         except:
             pass
 
@@ -219,7 +238,7 @@ class sbdatetime(datetime.datetime):
         finally:
             try:
                 if sbdatetime.has_locale:
-                    locale.setlocale(locale.LC_TIME, self.ORIG_LC_TIME)
+                    locale.setlocale(locale.LC_TIME, '')
             except:
                 sbdatetime.has_locale = False
 
@@ -778,7 +778,7 @@ class CMD_ComingEpisodes(ApiCall):
                 continue
 
             ep['airs'] = str(ep['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
-            dtEpisodeAirs = network_timezones.parse_date_time(int(ep['airdate']), ep['airs'], ep['network'])
+            dtEpisodeAirs = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(int(ep['airdate']), ep['airs'], ep['network']))
            ep['airdate'] = dtEpisodeAirs.toordinal()
 
            status = "soon"
@@ -868,7 +868,7 @@ class CMD_Episode(ApiCall):
         elif not showPath: # show dir is broken ... episode path will be empty
             episode["location"] = ""
         # convert stuff to human form
-        episode['airdate'] = sbdatetime.sbdatetime.sbfdate(network_timezones.parse_date_time(int(episode['airdate']), showObj.airs, showObj.network), d_preset=dateFormat)
+        episode['airdate'] = sbdatetime.sbdatetime.sbfdate(sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(int(episode['airdate']), showObj.airs, showObj.network)), d_preset=dateFormat)
         status, quality = Quality.splitCompositeStatus(int(episode["status"]))
         episode["status"] = _get_status_Strings(status)
         episode["quality"] = _get_quality_string(quality)
@@ -1813,7 +1813,7 @@ class CMD_Show(ApiCall):
         showDict["status"] = showObj.status
 
         if showObj.nextaired:
-            dtEpisodeAirs = network_timezones.parse_date_time(showObj.nextaired, showDict['airs'], showDict['network'])
+            dtEpisodeAirs = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(showObj.nextaired, showDict['airs'], showDict['network']))
             showDict['airs'] = sbdatetime.sbdatetime.sbftime(dtEpisodeAirs, t_preset=timeFormat).lstrip('0').replace(' 0', ' ')
             showDict['next_ep_airdate'] = sbdatetime.sbdatetime.sbfdate(dtEpisodeAirs, d_preset=dateFormat)
         else:
@@ -2320,7 +2320,7 @@ class CMD_ShowSeasons(ApiCall):
             status, quality = Quality.splitCompositeStatus(int(row["status"]))
             row["status"] = _get_status_Strings(status)
             row["quality"] = _get_quality_string(quality)
-            dtEpisodeAirs = network_timezones.parse_date_time(row['airdate'],showObj.airs,showObj.network)
+            dtEpisodeAirs = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(row['airdate'],showObj.airs,showObj.network))
             row['airdate'] = sbdatetime.sbdatetime.sbfdate(dtEpisodeAirs, d_preset=dateFormat)
             curSeason = int(row["season"])
             curEpisode = int(row["episode"])
@@ -2343,7 +2343,7 @@ class CMD_ShowSeasons(ApiCall):
             status, quality = Quality.splitCompositeStatus(int(row["status"]))
             row["status"] = _get_status_Strings(status)
             row["quality"] = _get_quality_string(quality)
-            dtEpisodeAirs = network_timezones.parse_date_time(row['airdate'], showObj.airs, showObj.network)
+            dtEpisodeAirs = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(row['airdate'], showObj.airs, showObj.network))
             row['airdate'] = sbdatetime.sbdatetime.sbfdate(dtEpisodeAirs, d_preset=dateFormat)
             if not curEpisode in seasons:
                 seasons[curEpisode] = {}
@@ -2588,7 +2588,7 @@ class CMD_Shows(ApiCall):
             }
 
             if curShow.nextaired:
-                dtEpisodeAirs = network_timezones.parse_date_time(curShow.nextaired, curShow.airs, showDict['network'])
+                dtEpisodeAirs = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(curShow.nextaired, curShow.airs, showDict['network']))
                 showDict['next_ep_airdate'] = sbdatetime.sbdatetime.sbfdate(dtEpisodeAirs, d_preset=dateFormat)
             else:
                 showDict['next_ep_airdate'] = ''
@@ -45,6 +45,7 @@ from sickbeard import naming
 from sickbeard import scene_exceptions
 from sickbeard import subtitles
 from sickbeard import network_timezones
+from sickbeard import sbdatetime
 
 from sickbeard.providers import newznab, rsstorrent
 from sickbeard.common import Quality, Overview, statusStrings, qualityPresetStrings, cpu_presets
@@ -392,8 +393,8 @@ class MainHandler(RequestHandler):
 
         # add localtime to the dict
         for index, item in enumerate(sql_results):
-            sql_results[index]['localtime'] = network_timezones.parse_date_time(item['airdate'], item['airs'],
-                                                                                item['network'])
+            sql_results[index]['localtime'] = sbdatetime.sbdatetime.convert_to_setting(network_timezones.parse_date_time(item['airdate'],
+                                                                                       item['airs'], item['network']))
 
         sql_results.sort(sorts[sickbeard.COMING_EPS_SORT])
 