
Commit e3e928b

Arrowar authored and github-actions[bot] committed
Bump v3.3.6
1 parent d480fce commit e3e928b


19 files changed: +213 −156 lines changed


.github/.domain/domains.json

Lines changed: 4 additions & 4 deletions
@@ -36,10 +36,10 @@
         "time_change": "2025-09-06 18:24:29"
     },
     "streamingcommunity": {
-        "domain": "cz",
-        "full_url": "https://streamingcommunityz.cz/",
-        "old_domain": "video",
-        "time_change": "2025-09-22 14:20:44"
+        "domain": "at",
+        "full_url": "https://streamingcommunityz.at/",
+        "old_domain": "cz",
+        "time_change": "2025-09-29 11:16:51"
     },
     "altadefinizionegratis": {
         "domain": "ist",

StreamingCommunity/Api/Site/altadefinizione/__init__.py

Lines changed: 0 additions & 1 deletion
@@ -2,7 +2,6 @@
 
 import sys
 import subprocess
-from urllib.parse import quote_plus
 
 
 # External library

StreamingCommunity/Api/Site/altadefinizione/series.py

Lines changed: 4 additions & 0 deletions
@@ -109,6 +109,10 @@ def download_episode(index_season_selected: int, scrape_serie: GetSerieInfo, dow
     episodes = scrape_serie.getEpisodeSeasons(index_season_selected)
     episodes_count = len(episodes)
 
+    if episodes_count == 0:
+        console.print(f"[red]No episodes found for season {index_season_selected}")
+        return
+
     if download_all:
         for i_episode in range(1, episodes_count + 1):
             path, stopped = download_video(index_season_selected, i_episode, scrape_serie)
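
The four added lines are a guard clause: if the scraper resolves a season to zero episodes, the function reports it and returns before entering the download loop. A standalone sketch of the same pattern, with a stubbed scraper in place of GetSerieInfo (console, the scraper object, and download_video here stand in for the real module's objects):

from rich.console import Console

console = Console()

def download_season(scraper, season_number: int) -> None:
    """Download every episode of a season, skipping seasons that resolve empty (sketch)."""
    episodes = scraper.getEpisodeSeasons(season_number)
    episodes_count = len(episodes)

    # Early exit mirrors the guard added in this commit.
    if episodes_count == 0:
        console.print(f"[red]No episodes found for season {season_number}")
        return

    for i_episode in range(1, episodes_count + 1):
        ...  # download_video(season_number, i_episode, scraper) in the real modules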

StreamingCommunity/Api/Site/crunchyroll/__init__.py

Lines changed: 0 additions & 1 deletion
@@ -2,7 +2,6 @@
 
 import sys
 import subprocess
-from urllib.parse import quote_plus
 
 
 # External library

StreamingCommunity/Api/Site/crunchyroll/series.py

Lines changed: 4 additions & 0 deletions
@@ -118,6 +118,10 @@ def download_episode(index_season_selected: int, scrape_serie: GetSerieInfo, dow
     episodes = scrape_serie.getEpisodeSeasons(index_season_selected)
     episodes_count = len(episodes)
 
+    if episodes_count == 0:
+        console.print(f"[red]No episodes found for season {index_season_selected}")
+        return
+
     if download_all:
         for i_episode in range(1, episodes_count + 1):
             path, stopped = download_video(index_season_selected, i_episode, scrape_serie)
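
The crunchyroll module gets the identical guard (as do guardaserie, mediasetinfinity, and raiplay below), so a single self-contained test sketch covers the behavior. It stubs the scraper with unittest.mock instead of importing any real site module, since the real download_episode signatures are truncated on this page:

from unittest.mock import MagicMock

def test_empty_season_triggers_no_downloads():
    """With zero episodes, the guard must fire before any download call (sketch)."""
    scraper = MagicMock()
    scraper.getEpisodeSeasons.return_value = []    # season resolves to no episodes
    download_video = MagicMock()

    # Inline copy of the guarded loop from the diffs above.
    episodes = scraper.getEpisodeSeasons(3)
    if len(episodes) != 0:
        for i_episode in range(1, len(episodes) + 1):
            download_video(3, i_episode, scraper)

    download_video.assert_not_called()

test_empty_season_triggers_no_downloads()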

StreamingCommunity/Api/Site/guardaserie/series.py

Lines changed: 4 additions & 0 deletions
@@ -96,6 +96,10 @@ def download_episode(scape_info_serie: GetSerieInfo, index_season_selected: int,
     episodes = scape_info_serie.get_episode_number(index_season_selected)
     episodes_count = len(episodes)
 
+    if episodes_count == 0:
+        console.print(f"[red]No episodes found for season {index_season_selected}")
+        return
+
     if download_all:
 
         # Download all episodes in the season

StreamingCommunity/Api/Site/mediasetinfinity/series.py

Lines changed: 4 additions & 0 deletions
@@ -109,6 +109,10 @@ def download_episode(index_season_selected: int, scrape_serie: GetSerieInfo, dow
     episodes = scrape_serie.getEpisodeSeasons(index_season_selected)
     episodes_count = len(episodes)
 
+    if episodes_count == 0:
+        console.print(f"[red]No episodes found for season {index_season_selected}")
+        return
+
     if download_all:
         for i_episode in range(1, episodes_count + 1):
             path, stopped = download_video(index_season_selected, i_episode, scrape_serie)

StreamingCommunity/Api/Site/mediasetinfinity/site.py

Lines changed: 11 additions & 1 deletion
@@ -2,6 +2,7 @@
 
 import os
 import sys
+from datetime import datetime
 
 
 # External libraries
@@ -118,13 +119,22 @@ def title_search(query: str) -> int:
         if page_url and page_url.startswith('//'):
             page_url = f"https:{page_url}"
 
+        date = item.get('year', '')
+        if not date and item.get('updated'):
+            try:
+
+                timestamp_ms = int(item.get('updated', 0))
+                date = datetime.fromtimestamp(timestamp_ms / 1000).year
+            except (ValueError, TypeError):
+                date = ''
+
         media_search_manager.add_media({
             'id': item.get('guid', '') or item.get('_id', ''),
             'name': item.get('title', ''),
             'type': media_type,
             'url': page_url,
             'image': next(iter(item.get('thumbnails', {}).values()), {}).get('url', ''),
-            'date': item.get('year', ''),
+            'date': date,
         })
 
     return media_search_manager.get_length()
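
The new fallback derives a year from Mediaset's updated field, a Unix timestamp expressed in milliseconds, and only when year is missing. The same conversion in isolation (the sample value is made up for illustration):

from datetime import datetime

updated = "1759138611000"          # hypothetical 'updated' value, milliseconds since the epoch

try:
    timestamp_ms = int(updated)
    date = datetime.fromtimestamp(timestamp_ms / 1000).year    # -> 2025 for this sample
except (ValueError, TypeError):
    date = ''                      # same fallback as the diff: leave the date empty

print(date)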

StreamingCommunity/Api/Site/mediasetinfinity/util/ScrapeSerie.py

Lines changed: 66 additions & 98 deletions
@@ -5,16 +5,15 @@
 
 
 # External libraries
-from curl_cffi import requests
+import httpx
 from bs4 import BeautifulSoup
 
 
 # Internal utilities
-from StreamingCommunity.Util.headers import get_headers, get_userAgent
 from StreamingCommunity.Util.config_json import config_manager
+from StreamingCommunity.Util.headers import get_headers, get_userAgent
 from StreamingCommunity.Api.Player.Helper.Vixcloud.util import SeasonManager
 
-
 # Variable
 max_timeout = config_manager.get_int("REQUESTS", "timeout")
 
@@ -36,58 +35,26 @@ def __init__(self, url):
         self.stagioni_disponibili = []
 
     def _extract_serie_id(self):
-        """Estrae l'ID della serie dall'URL di partenza"""
+        """Extract the series ID from the starting URL"""
         self.serie_id = f"SE{self.url.split('SE')[1]}"
-        print(f"Serie ID: {self.serie_id}")
         return self.serie_id
 
     def _get_public_id(self):
+        """Get the public ID for API calls"""
         self.public_id = "PR1GhC"
         return self.public_id
-
-        """
-        bearer_token = get_bearer_token()
-        headers = {
-            'authorization': f'Bearer {bearer_token}',
-            'user-agent': get_userAgent(),
-        }
-
-        response = requests.get(
-            'https://api-ott-prod-fe.mediaset.net/PROD/play/userlist/watchlist/v2.0',
-            headers=headers,
-            impersonate="chrome",
-            allow_redirects=True
-        )
-
-        if response.status_code == 401:
-            print("Token scaduto, rinnovare il token")
-
-        if response.status_code == 200:
-            data = response.json()
-            self.public_id = data['response']['entries'][0]['media'][0]['publicUrl'].split("/")[4]
-            print(f"Public id: {self.public_id}")
-            return self.public_id
-
-        else:
-            logging.error(f"Failed to get public ID: {response.status_code}")
-            return None
-        """
 
     def _get_series_data(self):
-        """Ottiene i dati della serie tramite l'API"""
-        headers = {
-            'User-Agent': get_userAgent(),
-        }
+        """Get series data through the API"""
+        headers = {'User-Agent': get_userAgent()}
         params = {'byGuid': self.serie_id}
 
-        response = requests.get(
-            f'https://feed.entertainment.tv.theplatform.eu/f/{self.public_id}/mediaset-prod-all-series-v2',
-            params=params,
-            headers=headers,
-            impersonate="chrome",
-            allow_redirects=True
-        )
-        print("Risposta per _get_series_data:", response.status_code)
+        with httpx.Client(timeout=max_timeout, follow_redirects=True) as client:
+            response = client.get(
+                f'https://feed.entertainment.tv.theplatform.eu/f/{self.public_id}/mediaset-prod-all-series-v2',
+                params=params,
+                headers=headers
+            )
 
         if response.status_code == 200:
             return response.json()
@@ -96,7 +63,7 @@ def _get_series_data(self):
             return None
 
     def _process_available_seasons(self, data):
-        """Processa le stagioni disponibili dai dati della serie"""
+        """Process available seasons from series data"""
         if not data or not data.get('entries'):
             logging.error("No series data found")
             return []
@@ -122,13 +89,13 @@ def _process_available_seasons(self, data):
             else:
                 logging.warning(f"Season URL not found: {url}")
 
-        # Ordina le stagioni dalla più vecchia alla più nuova
+        # Sort seasons from oldest to newest
         stagioni_disponibili.sort(key=lambda s: s['tvSeasonNumber'])
 
         return stagioni_disponibili
 
     def _build_season_page_urls(self, stagioni_disponibili):
-        """Costruisce gli URL delle pagine delle stagioni"""
+        """Build season page URLs"""
         parsed_url = urlparse(self.url)
         base_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
         series_slug = parsed_url.path.strip('/').split('/')[-1].split('_')[0]
@@ -138,39 +105,36 @@ def _build_season_page_urls(self, stagioni_disponibili):
             season['page_url'] = page_url
 
     def _extract_season_sb_ids(self, stagioni_disponibili):
-        """Estrae gli ID sb dalle pagine delle stagioni"""
-        for season in stagioni_disponibili:
-            response_page = requests.get(
-                season['page_url'],
-                headers={'User-Agent': get_userAgent()},
-                impersonate="chrome",
-                allow_redirects=True
-            )
-            print("Risposta per _extract_season_sb_ids:", response_page.status_code, " index season:", season['tvSeasonNumber'])
-
-            soup = BeautifulSoup(response_page.text, 'html.parser')
-
-            # Prova prima con 'Episodi', poi con 'Puntate intere'
-            link = soup.find('a', string='Episodi')
-            if not link:
-                #print("Using word: Puntate intere")
-                link = soup.find('a', string='Puntate intere')
-
-            if link and link.has_attr('href'):
-                if not link.string == 'Puntate intere':
-                    print("Using word: Episodi")
-                season['sb'] = link['href'].split(',')[-1]
-            else:
-                logging.warning(f"Link 'Episodi' o 'Puntate intere' non trovato per stagione {season['tvSeasonNumber']}")
+        """Extract sb IDs from season pages"""
+        with httpx.Client(timeout=max_timeout, follow_redirects=True) as client:
+            for season in stagioni_disponibili:
+                response_page = client.get(
+                    season['page_url'],
+                    headers={'User-Agent': get_userAgent()}
+                )
+                print("Response for _extract_season_sb_ids:", response_page.status_code, " season index:", season['tvSeasonNumber'])
+
+                soup = BeautifulSoup(response_page.text, 'html.parser')
+
+                # Try first with 'Episodi', then with 'Puntate intere'
+                link = soup.find('a', string='Episodi')
+                if not link:
+                    #print("Using word: Puntate intere")
+                    link = soup.find('a', string='Puntate intere')
+
+                if link and link.has_attr('href'):
+                    if not link.string == 'Puntate intere':
+                        print("Using word: Episodi")
+                    season['sb'] = link['href'].split(',')[-1]
+                else:
+                    logging.warning(f"Link 'Episodi' or 'Puntate intere' not found for season {season['tvSeasonNumber']}")
 
     def _get_season_episodes(self, season):
-        """Ottiene gli episodi per una stagione specifica"""
+        """Get episodes for a specific season"""
         if not season.get('sb'):
             return
 
         episode_headers = {
-            'origin': 'https://mediasetinfinity.mediaset.it',
-            'referer': 'https://mediasetinfinity.mediaset.it/',
             'user-agent': get_userAgent(),
         }
         params = {
@@ -180,9 +144,8 @@ def _get_season_episodes(self, season):
         }
        episode_url = f"https://feed.entertainment.tv.theplatform.eu/f/{self.public_id}/mediaset-prod-all-programs-v2"
 
-        episode_response = requests.get(episode_url, headers=episode_headers, params=params, impersonate="chrome"
-            , allow_redirects=True)
-        print("Risposta per _get_season_episodes:", episode_response.status_code)
+        with httpx.Client(timeout=max_timeout, follow_redirects=True) as client:
+            episode_response = client.get(episode_url, headers=episode_headers, params=params)
 
         if episode_response.status_code == 200:
             episode_data = episode_response.json()
@@ -193,7 +156,8 @@ def _get_season_episodes(self, season):
                     'id': entry.get('guid'),
                     'title': entry.get('title'),
                     'duration': int(entry.get('mediasetprogram$duration', 0) / 60) if entry.get('mediasetprogram$duration') else 0,
-                    'url': entry.get('media', [{}])[0].get('publicUrl') if entry.get('media') else None
+                    'url': entry.get('media', [{}])[0].get('publicUrl') if entry.get('media') else None,
+                    'name': entry.get('title')
                 }
                 season['episodes'].append(episode_info)
 
@@ -243,22 +207,22 @@ def collect_season(self) -> None:
             logging.error(f"Error in collect_season: {str(e)}")
 
     def _populate_seasons_manager(self):
-        """Popola il seasons_manager con i dati raccolti"""
+        """Populate the seasons_manager with collected data - ONLY for seasons with episodes"""
+        seasons_with_episodes = 0
+
         for season_data in self.stagioni_disponibili:
-            season_obj = self.seasons_manager.add_season({
-                'number': season_data['tvSeasonNumber'],
-                'name': f"Stagione {season_data['tvSeasonNumber']}"
-            })
 
-            if season_obj and season_data.get('episodes'):
-                for idx, episode in enumerate(season_data['episodes'], 1):
-                    season_obj.episodes.add({
-                        'id': episode['id'],
-                        'number': idx,
-                        'name': episode['title'],
-                        'url': episode['url'],
-                        'duration': episode['duration']
-                    })
+            # Add season to manager ONLY if it has episodes
+            if season_data.get('episodes') and len(season_data['episodes']) > 0:
+                season_obj = self.seasons_manager.add_season({
+                    'number': season_data['tvSeasonNumber'],
+                    'name': f"Season {season_data['tvSeasonNumber']}"
+                })
+
+                if season_obj:
+                    for episode in season_data['episodes']:
+                        season_obj.episodes.add(episode)
+                    seasons_with_episodes += 1
 
     # ------------- FOR GUI -------------
     def getNumberSeason(self) -> int:
@@ -276,10 +240,14 @@ def getEpisodeSeasons(self, season_number: int) -> list:
         """
         if not self.seasons_manager.seasons:
             self.collect_season()
-
-        # Get season directly by its number
-        season = self.seasons_manager.get_season_by_number(season_number)
-        return season.episodes.episodes if season else []
+
+        # Convert 1-based user input to 0-based array index
+        season_index = season_number - 1
+
+        # Get season by index in the available seasons list
+        season = self.seasons_manager.seasons[season_index]
+
+        return season.episodes.episodes
 
     def selectEpisode(self, season_number: int, episode_index: int) -> dict:
         """
@@ -290,4 +258,4 @@ def selectEpisode(self, season_number: int, episode_index: int) -> dict:
         logging.error(f"Episode index {episode_index} is out of range for season {season_number}")
         return None
 
-        return episodes[episode_index]
+        return episodes[episode_index]
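
The bulk of this file's change swaps curl_cffi's requests calls (with impersonate="chrome") for httpx, reusing the configured timeout and following redirects through a context-managed client, and it drops the remaining Italian docstrings and debug prints. A minimal sketch of the new request pattern in isolation (the feed URL and public ID come from the diff; the User-Agent string and timeout value are placeholders for what the project reads from its config and headers helpers):

from typing import Optional

import httpx

max_timeout = 15            # config_manager.get_int("REQUESTS", "timeout") in the real module
public_id = "PR1GhC"        # hard-coded public ID, as in _get_public_id above

def fetch_series_data(serie_id: str) -> Optional[dict]:
    """Fetch series metadata from the theplatform feed (sketch of _get_series_data)."""
    url = f"https://feed.entertainment.tv.theplatform.eu/f/{public_id}/mediaset-prod-all-series-v2"

    with httpx.Client(timeout=max_timeout, follow_redirects=True) as client:
        response = client.get(
            url,
            params={"byGuid": serie_id},
            headers={"User-Agent": "Mozilla/5.0"},
        )

    if response.status_code == 200:
        return response.json()
    return None

Note also that getEpisodeSeasons now indexes self.seasons_manager.seasons positionally (season_number - 1) instead of looking seasons up by number, which matches _populate_seasons_manager registering only the seasons that actually contain episodes.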

StreamingCommunity/Api/Site/raiplay/series.py

Lines changed: 4 additions & 0 deletions
@@ -135,6 +135,10 @@ def download_episode(index_season_selected: int, scrape_serie: GetSerieInfo, dow
     episodes = scrape_serie.getEpisodeSeasons(index_season_selected)
     episodes_count = len(episodes)
 
+    if episodes_count == 0:
+        console.print(f"[red]No episodes found for season {index_season_selected}")
+        return
+
     if download_all:
         for i_episode in range(1, episodes_count + 1):
             path, stopped = download_video(index_season_selected, i_episode, scrape_serie)
