Relaxing channel name check in Xtream API #178

Merged: 27 commits, Dec 8, 2021

Changes from 1 commit (of 27)
Commits
293963b
Added Initial XTream
superolmo May 28, 2021
a1dca48
Added XTream Series
superolmo Jun 2, 2021
d0be0a5
Added check for local logo_path
superolmo Jun 4, 2021
a84f7ce
Back to fixed path
superolmo Jun 4, 2021
b97598a
Added pyxtream choice
superolmo Jun 5, 2021
0530fc9
Replaced the test server
superolmo Jun 6, 2021
b56a6fb
Replaced the test server
superolmo Jun 6, 2021
ab11dfd
Fixed cache-path and added regex search
superolmo Jun 7, 2021
5d4a971
Merge branch 'master' of github.com:superolmo/hypnotix
superolmo Jun 7, 2021
2b8d127
Merge branch 'master' of https://github.com/linuxmint/hypnotix into l…
superolmo Jun 7, 2021
b38d61e
Changed osp back to os.path
superolmo Jun 7, 2021
7db1d62
Changed osp back to os.path
superolmo Jun 7, 2021
06517d6
Merge branch 'linuxmin-master'
superolmo Jun 7, 2021
2a45eb1
Fixed bug in the way it reload from cache
superolmo Jun 12, 2021
e79a848
Fixed missing provider when it doesn't load
superolmo Jun 18, 2021
dcbb6a1
Improved handling of missing keys
superolmo Jun 18, 2021
97b9e73
Fixed Categories and cleaned up the code
superolmo Jun 18, 2021
32af21f
Updated function names to follow PEP8
superolmo Jun 28, 2021
51e6d1d
Added check before authorizing
superolmo Sep 27, 2021
e6390d9
Merge remote-tracking branch 'upstream/master'
superolmo Sep 27, 2021
fca44f2
Scale down changes
superolmo Sep 27, 2021
0b41feb
Revert some more changes
superolmo Sep 27, 2021
215e191
Revert last changes
superolmo Sep 27, 2021
76bd0f2
Revert flag name
superolmo Sep 27, 2021
211db79
Discard streams w/o name, change live radio type to live stream
superolmo Nov 5, 2021
c8577a2
Rebase to upstream master
superolmo Nov 28, 2021
a7d447c
Fix subgroup name check
superolmo Dec 1, 2021
Fixed Categories and cleaned up the code
superolmo committed Jun 18, 2021
commit 97b9e73e2a2f8d2547103f43e0257e3ab5e1c372
132 changes: 51 additions & 81 deletions usr/lib/hypnotix/xtream.py
@@ -4,6 +4,9 @@
Module handles downloading xtream data
It does not support M3U

This application comes from the pyxtream library found at:
https://pypi.org/project/pyxtream

Part of this content comes from
https://github.com/chazlarson/py-xtream-codes/blob/master/xtream.py
https://github.com/linuxmint/hypnotix
@@ -17,7 +20,7 @@
__author__ = 'Claudio Olmi'

from typing import List
import requests
import time
from os import path as osp
from os import makedirs
@@ -125,8 +128,6 @@ class Group():
# Required by Hypnotix
name = ""
group_type = ""
channels = []
series = []

# XTream
group_id = ""
@@ -138,6 +139,9 @@ def __init__(self, group_info: dict, stream_type: str):
# Raw JSON Group
self.raw = group_info

self.channels = []
self.series = []

TV_GROUP, MOVIES_GROUP, SERIES_GROUP = range(3)

if "VOD" == stream_type:
@@ -161,7 +165,6 @@ class Episode():
# Required by Hypnotix
title = ""
name = ""


# XTream

@@ -201,8 +204,6 @@ class Serie():
name = ""
logo = ""
logo_path = ""
seasons = []
episodes = []

# XTream
series_id = ""
@@ -222,6 +223,9 @@ def __init__(self, xtream: object, series_info):
self.logo = series_info['cover']
self.logo_path = xtream.getLogoLocalPath(self.logo)

self.seasons = []
self.episodes = []

# Check if category_id key is available
if "series_id" in series_info.keys():
self.series_id = series_info['series_id']
@@ -241,10 +245,10 @@ def __init__(self, xtream: object, series_info):
class Season():
# Required by Hypnotix
name = ""
episodes = {}

def __init__(self, name):
self.name = name
self.episodes = {}

class XTream():
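
Note on the Group/Serie/Season changes above: moving channels, series, seasons, and episodes from class-level attributes into __init__ avoids Python's shared-mutable-attribute pitfall. A minimal standalone sketch (not part of xtream.py) of the difference:

class SharedGroup:
    channels = []              # class-level list, shared by every instance

class PerInstanceGroup:
    def __init__(self):
        self.channels = []     # fresh list per instance, as in this commit

a, b = SharedGroup(), SharedGroup()
a.channels.append("News")
print(b.channels)              # ['News'] -- the append leaked into the other group

c, d = PerInstanceGroup(), PerInstanceGroup()
c.channels.append("News")
print(d.channels)              # [] -- instances stay independent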

@@ -273,6 +277,9 @@ class XTream():
},
liveType
)
# If the cached JSON file is older than threshold_time_sec then load a new
# JSON dictionary from the provider
threshold_time_sec = 60*60*8

def __init__(self, provider_name: str, provider_username: str, provider_password: str, provider_url: str, cache_path: str = ""):
"""Initialize Xtream Class
@@ -289,7 +296,6 @@ def __init__(self, provider_name: str, provider_username: str, provider_password
self.password = provider_password
self.name = provider_name
self.cache_path = cache_path

# if the cache_path is specified, test that it is a directory
if self.cache_path != "":
# If the cache_path is not a directory, clear it
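
The constructor stores cache_path and then verifies it before use. A hedged sketch of that kind of guard; the helper name and the fallback location are illustrative, not taken from xtream.py:

import os

def resolve_cache_path(cache_path: str, fallback: str = "~/.hypnotix/xtream-cache") -> str:
    # Use the caller's path only if it is a real directory
    if cache_path and os.path.isdir(cache_path):
        return cache_path
    # Otherwise fall back to a default directory, creating it if needed
    default = os.path.expanduser(fallback)
    os.makedirs(default, exist_ok=True)
    return default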
@@ -318,66 +324,30 @@ def search_stream(self, keyword: str, return_type: str = "LIST") -> List:

search_result = []

regex = re.compile(keyword)
regex = re.compile(keyword,re.IGNORECASE)

print("Checking {} movies".format(len(self.movies)))
for stream in self.movies:
if re.match(regex, stream.name) is not None:
search_result.append(stream.export_json())

print("Checking {} channels".format(len(self.channels)))
for stream in self.channels:
if re.match(regex, stream.name) is not None:
search_result.append(stream.export_json())

print("Checking {} series".format(len(self.series)))
for stream in self.series:
if re.match(regex, stream.name) is not None:
search_result.append(stream.export_json())

if return_type == "JSON":
if search_result != None:
print("Found {} results".format(len(search_result)))
print("Found {} results `{}`".format(len(search_result),keyword))
return json.dumps(search_result, ensure_ascii=False)
else:
return search_result

def download_video(self, url: str, fullpath_filename: str) -> bool:
"""Download a stream

Args:
url (str): Complete URL of the stream
fullpath_filename (str): Complete File path where to save the stream

Returns:
bool: True if successful, False if error
"""
ret_code = False
mb_size = 1024*1024
try:
print("Downloading from URL `{}` and saving at `{}`".format(url,fullpath_filename))
response = requests.get(url, timeout=(5), stream=True)
print("Got response")
# If there is an answer from the remote server
if response.status_code == 200:
print("Got response 200")
# Set downloaded size
downloaded_bytes = 0
# Get total playlist byte size
total_content_size = int(response.headers['content-length'])
total_content_size_mb = total_content_size/mb_size
# Set stream blocks
block_bytes = int(4*mb_size) # 4 MB

#response.encoding = response.apparent_encoding
print("Ready to download {:.1f} MB file".format(total_content_size_mb))
with open(fullpath_filename, "w") as file:
# Grab data by block_bytes
for data in response.iter_content(block_bytes,decode_unicode=True):
downloaded_bytes += block_bytes
print("{:.0f}/{:.1f} MB downloaded".format(downloaded_bytes/mb_size,total_content_size_mb))
file.write(str(data))
if downloaded_bytes < total_content_size:
print("The file size is incorrect, deleting")
remove(fullpath_filename)
else:
# Set the datetime when it was last retrieved
# self.settings.set_
ret_code = True
else:
print("HTTP error %d while retrieving from %s!" % (response.status_code, url))
except Exception as e:
print(e)

return ret_code

def slugify(self, string: str) -> str:
"""Normalize string
@@ -464,14 +434,13 @@ def loadFromFile(self, filename) -> dict:
if osp.isfile(full_filename):

my_data = None
threshold_time = 60*60*8

# Get the elapsed seconds since the last file update
diff_time = time.time() - osp.getmtime(full_filename)
# If the file was updated less than the threshold time,
# it means that the file is still fresh, we can load it.
# Otherwise skip and return None to force a re-download
if threshold_time > diff_time:
if self.threshold_time_sec > diff_time:
# Load the JSON data
try:
with open(full_filename,mode='r',encoding='utf-8') as myfile:
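
loadFromFile now compares the cache file's age against the class-level threshold_time_sec (8 hours) instead of a local constant. A hedged sketch of that freshness test in isolation; is_cache_fresh is an illustrative helper, not a function in xtream.py:

import time
from os import path as osp

THRESHOLD_TIME_SEC = 60 * 60 * 8   # same 8-hour policy as threshold_time_sec

def is_cache_fresh(full_filename: str) -> bool:
    if not osp.isfile(full_filename):
        return False
    # Seconds since the file was last written
    age_sec = time.time() - osp.getmtime(full_filename)
    # Fresh enough to reuse; otherwise the caller should re-download
    return age_sec < THRESHOLD_TIME_SEC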
@@ -562,6 +531,7 @@ def load_iptv(self):
#provider.groups.append(new_group)
else:
print("Could not load {} Groups".format(loading_stream_type))
break

## Get Streams

@@ -590,13 +560,26 @@ def load_iptv(self):
for stream_channel in all_streams:
# Generate Group Title
if stream_channel['name'][0].isalnum():
group_title = str.split(stream_channel['name'],'|')[0]

# Some channels have no group,
# so let's add them to the catch-all group
if stream_channel['category_id'] == None:
stream_channel['category_id'] = '9999'
elif stream_channel['category_id'] != '1':
pass

# Find the first occurrence of the group that the
# Channel or Stream is pointing to
the_group = next(
(x for x in self.groups if x.group_id == stream_channel['category_id']),
None
)

if the_group != None:
group_title = the_group.name
else:
group_title = self.catch_all_group.name

if loading_stream_type == self.seriesType:
# Load all Series
new_series = Serie(self, stream_channel)
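
The group title is now resolved by looking up the stream's category_id in self.groups and falling back to the catch-all group. A hedged sketch of that lookup; Group here is trimmed down and find_group is an illustrative helper, not code from the diff:

from typing import List, Optional

class Group:
    def __init__(self, group_id: str, name: str):
        self.group_id = group_id
        self.name = name

def find_group(groups: List[Group], category_id: Optional[str], catch_all: Group) -> Group:
    # Streams with no category are filed under the catch-all id, '9999' in the diff
    if category_id is None:
        category_id = "9999"
    # First group whose id matches, else the catch-all group
    return next((g for g in groups if g.group_id == category_id), catch_all)

catch_all = Group("9999", "Other")
groups = [Group("1", "News"), Group("2", "Sports"), catch_all]
print(find_group(groups, "2", catch_all).name)    # Sports
print(find_group(groups, None, catch_all).name)   # Other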
@@ -613,35 +596,22 @@
stream_channel
)

# Find the first occurrence of the group that the
# Channel or Stream is pointing to
the_group = next(
(x for x in self.groups if x.group_id == stream_channel['category_id']),
None
)

# Save the new channel to the provider object and the new_group object
# Save the new channel to the local list of channels
if loading_stream_type == self.liveType:
self.channels.append(new_channel)
#provider.channels.append(new_channel)
elif loading_stream_type == self.vodType:
self.movies.append(new_channel)
#provider.movies.append(new_channel)
else:
self.series.append(new_series)
#provider.series.append(new_series)

if loading_stream_type != self.seriesType:
#self.channels.append(new_channel)
if the_group != None:

# Add stream to the specific Group
if the_group != None:
if loading_stream_type != self.seriesType:
the_group.channels.append(new_channel)
else:
print("Group not found `{}`".format(stream_channel['name']))
else:
if the_group != None:
the_group.series.append(new_series)
else:
print("Group not found `{}`".format(stream_channel['name']))
else:
print("Group not found `{}`".format(stream_channel['name']))
else:
print("Could not load {} Streams".format(loading_stream_type))
