Mirror of https://github.com/moparisthebest/SickRage (synced 2025-01-06 03:18:01 -05:00)
If nothing has changed, don't execute the transaction for the network table and don't reload the table
Merge pull request #659 from Prinz23/network_update_fix2: don't call 2 unnecessary functions in network_timezones
Merge pull request #662 from Prinz23/network_none_fix: check that the timezone is not None; in that case, return the SickBeard timezone
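The #662 part of this merge is about get_network_timezone falling back to the local SickBeard timezone instead of failing. A minimal standalone sketch of that fallback pattern (not the project's module; sb_timezone, the sample dict, and the zoneinfo_available flag are illustrative stand-ins):

# Minimal sketch of the fallback described above.
from dateutil import tz

sb_timezone = tz.tzlocal()                   # the local "SickBeard timezone" fallback
sample_network_dict = {'CBS': 'US/Eastern'}  # network name -> timezone name


def get_network_timezone(network, network_dict, zoneinfo_available=True):
    # no network given -> fall back immediately
    if network is None:
        return sb_timezone
    try:
        # the diff guards the lookup on lib.dateutil.zoneinfo.ZONEINFOFILE being present;
        # zoneinfo_available stands in for that check here
        if zoneinfo_available:
            return tz.gettz(network_dict[network])
        else:
            return sb_timezone
    except Exception:
        # unknown network or failed lookup -> fall back as well
        return sb_timezone


print(get_network_timezone('CBS', sample_network_dict))   # tzinfo for US/Eastern (or None if unresolvable)
print(get_network_timezone(None, sample_network_dict))    # the local timezone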
parent 3bdebb4e6d
commit 6adbdb5e86
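The headline change in the diff below only runs the batched database work and the dict reload when something actually changed. A rough self-contained sketch of that guard (MassActionDB and apply_network_updates are stand-ins, not SickBeard's API):

# Illustrative sketch only: the batched statements run, and the in-memory dict
# is reloaded, only when the queue is non-empty.
class MassActionDB(object):
    def mass_action(self, querylist):
        # stands in for SickBeard's db layer, which runs all queued statements in one transaction
        print("executing %d statements in one transaction" % len(querylist))


def apply_network_updates(myDB, ql, reload_dict):
    # before the fix, mass_action() and the reload ran even when ql was empty
    if len(ql) > 0:
        # change all network timezone infos at once (much faster)
        myDB.mass_action(ql)
        reload_dict()


# nothing changed -> no transaction, no reload
apply_network_updates(MassActionDB(), [], reload_dict=lambda: None)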
@@ -28,7 +28,9 @@ import re
 import datetime

 # regex to parse time (12/24 hour format)
-time_regex = re.compile(r"(\d{1,2}):(\d{2,2})( [PA]M)?\b", flags=re.IGNORECASE)
+time_regex = re.compile(r"(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b", flags=re.IGNORECASE)
+am_regex = re.compile(r"(A[. ]? ?M)", flags=re.IGNORECASE)
+pm_regex = re.compile(r"(P[. ]? ?M)", flags=re.IGNORECASE)

 network_dict = None

@@ -37,7 +39,7 @@ sb_timezone = tz.tzlocal()
 # helper to remove failed temp download
 def _remove_zoneinfo_failed(filename):
     try:
-        os.remove(filename)
+        ek.ek(os.remove,filename)
     except:
         pass

@@ -56,7 +58,7 @@ def _remove_old_zoneinfo():
                 file_w_path = ek.ek(join,path,filename)
                 if file_w_path != cur_file and ek.ek(isfile,file_w_path):
                     try:
-                        os.remove(file_w_path)
+                        ek.ek(os.remove,file_w_path)
                         logger.log(u"Delete unneeded old zoneinfo File: " + file_w_path)
                     except:
                         logger.log(u"Unable to delete: " + file_w_path,logger.ERROR)
@@ -68,7 +70,7 @@ def _update_zoneinfo():
     sb_timezone = tz.tzlocal()

     # now check if the zoneinfo needs update
-    url_zv = 'http://github.com/Prinz23/sb_network_timezones/raw/master/zoneinfo.txt'
+    url_zv = 'https://github.com/Prinz23/sb_network_timezones/raw/master/zoneinfo.txt'

     url_data = helpers.getURL(url_zv)

@@ -87,13 +89,13 @@ def _update_zoneinfo():
         return

     # now load the new zoneinfo
-    url_tar = u'http://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo
+    url_tar = u'https://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo
     zonefile = ek.ek(realpath, u'lib/dateutil/zoneinfo/' + new_zoneinfo)
     zonefile_tmp = re.sub(r"\.tar\.gz$",'.tmp', zonefile)

-    if (os.path.exists(zonefile_tmp)):
+    if (ek.ek(os.path.exists,zonefile_tmp)):
         try:
-            os.remove(zonefile_tmp)
+            ek.ek(os.remove,zonefile_tmp)
         except:
             logger.log(u"Unable to delete: " + zonefile_tmp,logger.ERROR)
             return
@@ -109,10 +111,10 @@ def _update_zoneinfo():
         # remove the old zoneinfo file
         if (cur_zoneinfo is not None):
             old_file = ek.ek(realpath, u'lib/dateutil/zoneinfo/' + cur_zoneinfo)
-            if (os.path.exists(old_file)):
-                os.remove(old_file)
+            if (ek.ek(os.path.exists,old_file)):
+                ek.ek(os.remove,old_file)
         # rename downloaded file
-        os.rename(zonefile_tmp,zonefile)
+        ek.ek(os.rename,zonefile_tmp,zonefile)
         # load the new zoneinfo
         reload(lib.dateutil.zoneinfo)
         sb_timezone = tz.tzlocal()
@@ -133,7 +135,7 @@ def update_network_dict():
     d = {}

     # network timezones are stored on github pages
-    url = 'http://github.com/Prinz23/sb_network_timezones/raw/master/network_timezones.txt'
+    url = 'https://github.com/Prinz23/sb_network_timezones/raw/master/network_timezones.txt'

     url_data = helpers.getURL(url)

@@ -173,8 +175,9 @@ def update_network_dict():
         L = list(va for va in old_d)
         ql.append(["DELETE FROM network_timezones WHERE network_name IN ("+','.join(['?'] * len(L))+")", L])
     # change all network timezone infos at once (much faster)
-    myDB.mass_action(ql)
-    load_network_dict()
+    if len(ql) > 0:
+        myDB.mass_action(ql)
+        load_network_dict()

 # load network timezones from db into dict
 def load_network_dict():
@@ -197,7 +200,10 @@ def get_network_timezone(network, network_dict):
         return sb_timezone

     try:
-        return tz.gettz(network_dict[network])
+        if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
+            return tz.gettz(network_dict[network])
+        else:
+            return sb_timezone
     except:
         return sb_timezone

@@ -206,20 +212,28 @@ def parse_date_time(d, t, network):
     if network_dict is None:
         load_network_dict()
     mo = time_regex.search(t)
-    if mo is not None and len(mo.groups()) >= 2:
-        try:
-            hr = helpers.tryInt(mo.group(1))
-            m = helpers.tryInt(mo.group(2))
-            ap = mo.group(3)
-            # convert am/pm to 24 hour clock
-            if ap is not None:
-                if ap.lower() == u" pm" and hr != 12:
-                    hr += 12
-                elif ap.lower() == u" am" and hr == 12:
-                    hr -= 12
-        except:
-            hr = 0
-            m = 0
+    if mo is not None and len(mo.groups()) >= 5:
+        if mo.group(5) is not None:
+            try:
+                hr = helpers.tryInt(mo.group(1))
+                m = helpers.tryInt(mo.group(4))
+                ap = mo.group(5)
+                # convert am/pm to 24 hour clock
+                if ap is not None:
+                    if pm_regex.search(ap) is not None and hr != 12:
+                        hr += 12
+                    elif am_regex.search(ap) is not None and hr == 12:
+                        hr -= 12
+            except:
+                hr = 0
+                m = 0
+        else:
+            try:
+                hr = helpers.tryInt(mo.group(1))
+                m = helpers.tryInt(mo.group(6))
+            except:
+                hr = 0
+                m = 0
     else:
         hr = 0
         m = 0
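To make the time_regex rework above concrete, here is a standalone sketch of how the new capture groups are consumed, mirroring the parse_date_time branches from the last hunk (plain int() replaces helpers.tryInt; the example inputs are assumptions, not project tests):

# Standalone sketch (not SickBeard code) of the widened time parsing.
import re

time_regex = re.compile(r"(\d{1,2})(([:.](\d{2,2}))? ?([PA][. ]? ?M)|[:.](\d{2,2}))\b", flags=re.IGNORECASE)
am_regex = re.compile(r"(A[. ]? ?M)", flags=re.IGNORECASE)
pm_regex = re.compile(r"(P[. ]? ?M)", flags=re.IGNORECASE)


def parse_time(t):
    """Return (hour, minute) on a 24-hour clock, or (0, 0) if the string can't be parsed."""
    mo = time_regex.search(t)
    if mo is None:
        return 0, 0
    if mo.group(5) is not None:
        # 12-hour branch: group(1)=hour, group(4)=optional minutes, group(5)=am/pm marker
        hr = int(mo.group(1))
        m = int(mo.group(4)) if mo.group(4) else 0
        if pm_regex.search(mo.group(5)) and hr != 12:
            hr += 12
        elif am_regex.search(mo.group(5)) and hr == 12:
            hr -= 12
    else:
        # 24-hour branch: group(1)=hour, group(6)=minutes
        hr = int(mo.group(1))
        m = int(mo.group(6))
    return hr, m


# e.g. parse_time("8:00 PM") -> (20, 0), parse_time("8 p.m.") -> (20, 0),
# parse_time("12 AM") -> (0, 0), parse_time("20:30") -> (20, 30)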