Upload files to "/"
This commit is contained in:
commit
6eff8b22c2
1 changed files with 199 additions and 0 deletions
199
app.py
Normal file
199
app.py
Normal file
|
|
@ -0,0 +1,199 @@
|
||||||
|
from flask import Flask, render_template, request, jsonify, send_file
|
||||||
|
from io import BytesIO
|
||||||
|
import requests
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
import urllib.parse
|
||||||
|
import time
|
||||||
|
import random
|
||||||
|
import os
|
||||||
|
|
||||||
|
app = Flask(__name__)

# Request settings
REQUEST_DELAY = 3.0 # seconds; minimum spacing enforced between outbound requests
last_request_time = 0  # time.time() stamp of the last request; updated by polite_request()

# Extended list of user agents
# polite_request() picks one at random per request to vary the client fingerprint.
USER_AGENTS = [
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36',
    'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1',
    'Mozilla/5.0 (iPad; CPU OS 14_6 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Mobile/15E148 Safari/604.1',
    'Mozilla/5.0 (Linux; Android 11; SM-G975F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Mobile Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'
]
|
||||||
|
|
||||||
|
def polite_request(url):
    """Fetch *url* with rate limiting, a rotating user agent, and optional proxies.

    Sleeps so that at least REQUEST_DELAY seconds elapse between consecutive
    requests, then issues a GET with browser-like headers. Raises
    requests.HTTPError on a non-2xx response. The last-request timestamp is
    recorded even when the request fails, so retries remain polite.
    """
    global last_request_time

    # Enforce the minimum delay between consecutive outbound requests.
    wait = REQUEST_DELAY - (time.time() - last_request_time)
    if wait > 0:
        time.sleep(wait)

    request_headers = {
        'User-Agent': random.choice(USER_AGENTS),
        'Referer': 'https://downloads.khinsider.com/',
        'Accept-Language': 'en-US,en;q=0.5',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, br'
    }

    # Honor the standard proxy environment variables when present.
    proxy_map = {}
    for scheme, env_var in (('http', 'HTTP_PROXY'), ('https', 'HTTPS_PROXY')):
        value = os.getenv(env_var)
        if value:
            proxy_map[scheme] = value

    try:
        with requests.Session() as http:
            resp = http.get(
                url,
                headers=request_headers,
                proxies=proxy_map if proxy_map else None,
                timeout=10
            )
            resp.raise_for_status()
            return resp
    finally:
        # Stamp the time unconditionally so failed requests also count
        # toward the rate limit.
        last_request_time = time.time()
|
||||||
|
|
||||||
|
@app.route('/')
def index():
    """Serve the single-page front end."""
    return render_template('index.html')
|
||||||
|
|
||||||
|
@app.route("/search")
|
||||||
|
def search():
|
||||||
|
query = request.args.get("q", "").strip()
|
||||||
|
if not query or len(query) < 3:
|
||||||
|
return jsonify({"error": "Query must be at least 3 characters"}), 400
|
||||||
|
|
||||||
|
results = search_for_album(query)
|
||||||
|
return jsonify({
|
||||||
|
"results": results,
|
||||||
|
"count": len(results)
|
||||||
|
})
|
||||||
|
|
||||||
|
@app.route('/album')
def album():
    """Return the track listing for the album page given by ?url=."""
    album_url = request.args.get('url')
    if album_url:
        return jsonify(get_album_tracks(album_url))
    return jsonify({'error': 'Missing album URL'}), 400
|
||||||
|
|
||||||
|
@app.route('/download')
def download():
    """Download a single track and return it as a file attachment.

    Query params:
        url: khinsider track page URL.

    Resolves the direct audio link from the track page, buffers the audio
    in memory, and returns it with a filename derived from the URL path.
    Errors are reported as JSON with an appropriate HTTP status code.
    """
    track_url = request.args.get('url')
    if not track_url:
        return jsonify({'error': 'Missing track URL'}), 400

    direct_url = get_download_url(track_url)
    if not direct_url:
        return jsonify({'error': 'Could not find download link'}), 404

    try:
        # polite_request() already raises for non-2xx statuses, so no
        # second raise_for_status() is needed here.
        response = polite_request(direct_url)

        # NOTE(review): the whole file is buffered in memory; large FLACs
        # could be streamed instead if memory becomes a concern.
        buffer = BytesIO()
        for chunk in response.iter_content(chunk_size=8192):
            if chunk:
                buffer.write(chunk)
        buffer.seek(0)

        # Derive the filename from the URL *path* only, so a trailing
        # query string never leaks into the name; fall back to a generic
        # name if the path ends with '/'.
        path = urllib.parse.urlsplit(direct_url).path
        filename = urllib.parse.unquote(path.rsplit('/', 1)[-1]) or 'track'

        return send_file(
            buffer,
            as_attachment=True,
            download_name=filename,
            mimetype='audio/mpeg' if filename.endswith('.mp3') else 'audio/flac',
            conditional=False
        )

    except Exception as e:
        return jsonify({'error': str(e)}), 500
|
||||||
|
|
||||||
|
def search_for_album(query):
    """Scrape khinsider's search results for *query*.

    Returns a list of {'name', 'url'} dicts with duplicate URLs removed
    (first occurrence wins). Returns an empty list on any error.
    """
    try:
        search_url = f"https://downloads.khinsider.com/search?search={urllib.parse.quote(query)}"
        page = BeautifulSoup(polite_request(search_url).text, 'html.parser')

        # dict keyed by URL: preserves insertion order and deduplicates,
        # keeping the first occurrence of each album URL.
        unique = {}
        for row in page.select('table.albumList tr:not(:first-child)'):
            anchor = row.select_one('td:nth-of-type(2) a')
            if anchor is None:
                continue
            full_url = f"https://downloads.khinsider.com{anchor['href']}"
            if full_url not in unique:
                unique[full_url] = {"name": anchor.text.strip(), "url": full_url}

        return list(unique.values())

    except Exception as e:
        print(f"Search error: {str(e)}")
        return []
|
||||||
|
|
||||||
|
def get_album_tracks(album_url):
    """Scrape an album page for its downloadable tracks.

    Returns a list of {'name', 'url', 'format'} dicts; an empty list on
    any error.
    """
    try:
        page = BeautifulSoup(polite_request(album_url).text, 'html.parser')

        tracks = []
        for row in page.select('table#songlist tr:has(a[href$=".mp3"]), table#songlist tr:has(a[href$=".flac"])'):
            anchor = row.find('a', href=True)
            if anchor is None:
                continue
            href = anchor['href']
            if not (href.endswith('.mp3') or href.endswith('.flac')):
                continue
            # Relative links are resolved against the site root.
            full_url = href if href.startswith('http') else f"https://downloads.khinsider.com{href}"
            tracks.append({
                'name': anchor.text.strip(),
                'url': full_url,
                'format': 'flac' if href.lower().endswith('.flac') else 'mp3'
            })

        return tracks

    except Exception as e:
        print(f"Album error: {str(e)}")
        return []
|
||||||
|
|
||||||
|
def get_download_url(track_page_url):
    """Resolve the direct audio URL from a khinsider track page.

    Prefers FLAC over MP3 when both are linked. Returns None when no
    audio link is found or on any error.
    """
    try:
        page = BeautifulSoup(polite_request(track_page_url).text, 'html.parser')

        candidates = []
        for anchor in page.find_all('a', href=True):
            target = anchor['href']
            if not target.endswith(('.mp3', '.flac')):
                continue
            resolved = urllib.parse.unquote(target)
            if not resolved.startswith('http'):
                resolved = f"https://downloads.khinsider.com{resolved}"
            candidates.append(resolved)

        # Prefer lossless first, then fall back to MP3; within each format
        # the first link on the page wins, as before.
        for suffix in ('.flac', '.mp3'):
            for candidate in candidates:
                if candidate.endswith(suffix):
                    return candidate

        return None

    except Exception as e:
        print(f"Download URL error: {str(e)}")
        return None
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # BUG FIX: the original passed debug=true — lowercase `true` is not a
    # Python name and raises NameError at startup; the boolean is `True`.
    # debug=True is for local development only; never enable the Werkzeug
    # debugger on a publicly reachable host.
    app.run(host='127.0.0.1', port=5000, debug=True)
|
||||||
Loading…
Add table
Add a link
Reference in a new issue