Mirror of https://github.com/NohamR/GoFileCLI.git (synced 2025-05-24 00:49:00 +00:00)
Commit message: fixes
Parent: 26dda558e4
Commit: f63fa6ff5c
.gitignore (vendored) · 1 line changed

@@ -136,3 +136,4 @@ encrypt.py
 assets/sounds/Blow.aiff
 assets/sounds/Blow.mp3
 assets/sounds/Blow.wav
+/VP8SWj
gofilecli.py (Executable file → Normal file) · 112 lines changed

@@ -6,12 +6,13 @@ import argparse
 import os
 import logging
 import sys
-from dotenv import load_dotenv, set_key
 import platform
 import subprocess
 import sys
 import simpleaudio as sa
-import ping3
+from tqdm import tqdm
+import json
+from dotenv import load_dotenv
 
 
 def reqst(url, method ,headers=None, data=None, files=None, params=None, json=None, logger=None):
@@ -24,21 +25,27 @@ def reqst(url, method ,headers=None, data=None, files=None, params=None, json=None, logger=None):
             response = requests.put(url, headers=headers, data=data, files=files, params=params, json=json)
         elif method == "delete":
             response = requests.delete(url, headers=headers, data=data, files=files, params=params, json=json)
+        logger.debug(f"Request to {url} with method {method} returned status code {response.status_code}")
         json_response = response.json() # If response content is not JSON, this will raise a ValueError
         return json_response
     except requests.exceptions.HTTPError as http_err:
+        logger.debug(f"Response: {response.text}")
         logger.error(f"HTTP error occurred: {http_err}") # Handles HTTP errors (e.g., 404, 500)
         sys.exit()
     except requests.exceptions.ConnectionError as conn_err:
+        logger.debug(f"Response: {response.text}")
         logger.error(f"Connection error occurred: {conn_err}") # Handles network-related errors
         sys.exit()
     except requests.exceptions.Timeout as timeout_err:
+        logger.debug(f"Response: {response.text}")
         logger.error(f"Timeout error occurred: {timeout_err}") # Handles request timeouts
         sys.exit()
     except requests.exceptions.RequestException as req_err:
+        logger.debug(f"Response: {response.text}")
         logger.error(f"An error occurred: {req_err}") # Catches any other requests-related errors
         sys.exit()
     except ValueError as json_err:
+        logger.debug(f"Response: {response.text}")
         logger.error(f"JSON decode error: {json_err}") # Handles issues with JSON decoding
         sys.exit()
 
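Note: with this change every failure path in reqst() logs the raw response body before exiting. A minimal self-contained sketch of the same wrapper pattern (the commented usage endpoint is a placeholder, not taken from the commit):

import logging
import sys
import requests

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

def fetch_json(url):
    # Same idea as reqst(): one wrapper that logs the response body on any failure, then exits.
    response = None
    try:
        response = requests.get(url)
        response.raise_for_status()
        return response.json()
    except (requests.exceptions.RequestException, ValueError) as err:
        if response is not None:
            logger.debug(f"Response: {response.text}")
        logger.error(f"Request failed: {err}")
        sys.exit()

# data = fetch_json("https://api.gofile.io/servers")  # placeholder endpoint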
@@ -141,13 +148,18 @@ def getservers(logger):
         return None
 
 
-def ping_server(url, count=4):
+def ping_server(url, logger, num_requests=4, delay=0.1):
     response_times = []
-    for _ in range(count):
-        response_time = ping3.ping(url)
-        if response_time:
-            response_times.append(response_time)
-        time.sleep(0.2)
+    for _ in range(num_requests):
+        try:
+            start_time = time.time()
+            response = requests.head(url)
+            response_time = time.time() - start_time
+            if response.status_code == 200:
+                response_times.append(response_time)
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Error: {e}")
+        time.sleep(delay)
     if response_times:
         avg_response = sum(response_times) / len(response_times)
         return avg_response
@@ -160,8 +172,8 @@ def test_servers(servers, logger):
     best_time = float('inf')
     for server in servers:
         logger.debug(f"Pinging {server}...")
-        url = f"{server}.gofile.io"
-        avg_time = ping_server(url)
+        url = f"https://{server}.gofile.io"
+        avg_time = ping_server(url, logger)
         logger.debug(f"Average response time for {server}: {avg_time:.2f} ms")
         if avg_time < best_time:
             best_time = avg_time
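Note: the commit replaces the ICMP-based ping3 check with timed HTTP HEAD requests, so no raw-socket privileges are needed. A simplified standalone sketch of that latency check (the server names below are placeholders):

import time
import requests

def head_latency(url, num_requests=4, delay=0.1):
    # Average round-trip time of HTTP HEAD requests, a simplified variant of the new ping_server().
    times = []
    for _ in range(num_requests):
        try:
            start = time.time()
            response = requests.head(url)
            if response.status_code == 200:
                times.append(time.time() - start)
        except requests.exceptions.RequestException:
            pass
        time.sleep(delay)
    return sum(times) / len(times) if times else float('inf')

# Example with placeholder server names: pick whichever answers fastest.
servers = ["store1", "store2"]
best = min(servers, key=lambda s: head_latency(f"https://{s}.gofile.io"))
print(f"Fastest server: {best}")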
@@ -245,7 +257,7 @@ def createfolder(parentFolderId, folderName, logger):
     if response["status"] == "ok":
         name = response["data"]["name"]
         code = response["data"]["code"]
-        folderId = response["data"]["folderId"]
+        folderId = response["data"]["id"]
         logger.debug(f"""Folder {name} created with code {code} and folderId {folderId}""")
         return folderId
     else:
@@ -253,16 +265,50 @@ def createfolder(parentFolderId, folderName, logger):
         return None
 
 
+def read_in_chunks(file_object, CHUNK_SIZE):
+    while True:
+        data = file_object.read(CHUNK_SIZE)
+        if not data:
+            break
+        yield data
+
+
 def uploadfile(serverName, folderId, filePath, logger):
-    headers = {"Authorization": f"Bearer {TOKEN}"}
-    files = {
-        'file': (filePath, open(filePath, 'rb')),
-        'folderId': (None, folderId),
-    }
+    # reference : https://api.video/blog/tutorials/upload-a-big-video-file-using-python/
     start_time = time.time()
-    # response = requests.post(f"https://{serverName}.gofile.io/contents/uploadfile", headers=headers, files=files).json()
-    response = reqst(f"https://{serverName}.gofile.io/contents/uploadfile", headers=headers, files=files, logger=logger, method="post")
+    # CHUNK_SIZE = 6000000
+    # content_size = os.stat(filePath).st_size
+    # f = open(filePath, "rb")
+    # index = 0
+    # offset = 0
+    # headers = {"Authorization": f"Bearer {TOKEN}", 'content-type': 'multipart/form-data',}
+    # with tqdm(total=content_size, unit='B', unit_scale=True, desc='Uploading', leave=False) as progress_bar:
+    #     for chunk in read_in_chunks(f, CHUNK_SIZE):
+    #         offset = index + len(chunk)
+    #         headers['Content-Range'] = 'bytes %s-%s/%s' % (index, offset - 1, content_size)
+    #         index = offset
+    #         try:
+    #             # file = {"file": chunk, 'folderId': (None, folderId)}
+    #             # response = requests.post(f"https://{serverName}.gofile.io/contents/uploadfile", files=file, headers=headers)
+    #             # files = {"file": chunk}
+    #             # data = {"folderId": folderId}
+    #             files = {"file": chunk,}
+    #             response = requests.post(f"https://{serverName}.gofile.io/contents/uploadfile",files=files,headers=headers)
+    #             logger.debug("r: %s, Content-Range: %s" % (response, headers['Content-Range']))
+    #             progress_bar.update(len(chunk))
+    #         except Exception as e:
+    #             logger.error(f"Error: {e}")
+    #             logger.debug(f"{response.text}")
+    # response = response.json()
+    command = f"""curl -X POST 'https://{serverName}.gofile.io/contents/uploadfile' -H "Authorization: Bearer {TOKEN}" -F "file=@{filePath}" -F "folderId={folderId}" """
+    response = subprocess.run(command, shell=True, capture_output=True, text=True)
+    try:
+        response_json = json.loads(response.stdout)
+    except json.JSONDecodeError:
+        logger.error("Failed to parse response as JSON.")
+        return None
     speed, elapsed_time = calculate_upload_speed(filePath, start_time)
+    response = response_json
     if response["status"] == "ok":
         logger.debug(response)
         name = response["data"]["name"]
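Note: the multipart upload through requests/reqst() is replaced by shelling out to curl and parsing its stdout as JSON; the chunked Content-Range attempt is left commented out. A self-contained sketch of the curl-through-subprocess pattern (token, server name, file path and folder id are placeholders; curl must be available on PATH):

import json
import subprocess

def upload_via_curl(server, token, file_path, folder_id):
    # Build the same shape of curl command the commit uses and capture its stdout (the API's JSON reply).
    command = (
        f"curl -X POST 'https://{server}.gofile.io/contents/uploadfile' "
        f'-H "Authorization: Bearer {token}" '
        f'-F "file=@{file_path}" -F "folderId={folder_id}"'
    )
    result = subprocess.run(command, shell=True, capture_output=True, text=True)
    try:
        return json.loads(result.stdout)
    except json.JSONDecodeError:
        return None

# Usage with placeholder values:
# upload_via_curl("store1", "MY_TOKEN", "video.mp4", "abc123-folder-id")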
@@ -301,11 +347,16 @@ def upload(filePath, folderPath, folderName, parentFolderId, private, logger):
     else:
         logger.error("File not found")
         sys.exit()
 
+    # Getting servers
     servers = getservers(logger)
     if servers:
-        if len(servers) > 1:
-            serverName = test_servers(servers, logger)
+        if len(servers) > 1: # If there are multiple servers, check the size of the files
+            if max([os.path.getsize(file) for file in files]) > 100 * 1024 * 1024: # 100 MB in bytes
+                logger.debug("One of the file have a size > 100 MB. Fetching best server...")
+                serverName = test_servers(servers, logger)
+            else:
+                serverName = random.choice(servers)
         else:
             serverName = servers[0]
         logger.debug(f"Selected server: {serverName}")
@@ -324,7 +375,10 @@ def upload(filePath, folderPath, folderName, parentFolderId, private, logger):
             parentFolderId = parentFolderId
             logger.debug(f"FolderId: {parentFolderId}")
         else:
-            parentFolderId = PRIVATE_PARENT_ID
+            # parentFolderId = PRIVATE_PARENT_ID
+            logger.info(f"Creating folder: {folderName} for PRIVATE_PARENT_ID: {PRIVATE_PARENT_ID}")
+            folderId = createfolder(PRIVATE_PARENT_ID, None, logger)
+            parentFolderId = folderId
             logger.debug(f"FolderId: {parentFolderId}")
 
     for file in files:
@@ -356,10 +410,15 @@ def upload(filePath, folderPath, folderName, parentFolderId, private, logger):
 def downloadFile(downloadUrl, path, logger):
     start_time = time.time()
     headers = {"Authorization": f"Bearer {TOKEN}"}
-    # response = requests.get(downloadUrl, headers=headers)
-    response = reqst(downloadUrl, headers=headers, logger=logger, method="get")
-    with open(path, "wb") as f:
-        f.write(response.content)
+    response = requests.get(downloadUrl, headers=headers, stream=True)
+    # response = reqst(downloadUrl, headers=headers, logger=logger, method="get")
+    total_size = int(response.headers.get('content-length', 0))
+
+    with open(path, "wb") as f, tqdm(total=total_size, unit='B', unit_scale=True, desc='Downloading', leave=False) as progress_bar:
+        for chunk in response.iter_content(1024):
+            if chunk:
+                f.write(chunk)
+                progress_bar.update(len(chunk))
     logger.debug(f"File downloaded: {path}")
     speed, elapsed_time = calculate_upload_speed(path, start_time)
     return speed, elapsed_time
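Note: downloads now stream in 1 KiB chunks behind a tqdm progress bar instead of buffering the whole body via response.content. The same pattern in isolation (URL, token and output path below are placeholders):

import requests
from tqdm import tqdm

def stream_download(url, path, token=None):
    headers = {"Authorization": f"Bearer {token}"} if token else {}
    response = requests.get(url, headers=headers, stream=True)
    total_size = int(response.headers.get('content-length', 0))
    # Write the body in 1 KiB chunks while advancing a byte-scaled progress bar.
    with open(path, "wb") as f, tqdm(total=total_size, unit='B', unit_scale=True, desc='Downloading') as bar:
        for chunk in response.iter_content(1024):
            if chunk:
                f.write(chunk)
                bar.update(len(chunk))

# Usage with a placeholder URL:
# stream_download("https://store1.gofile.io/download/<contentId>/file.bin", "file.bin", token="MY_TOKEN")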
@@ -437,6 +496,7 @@ def init():
     logger = logging.getLogger(__name__)
 
     load_dotenv()
+
     global TOKEN
     global PRIVATE_PARENT_ID
     global ACCOUNT_ID
requirements.txt

@@ -1,5 +1,5 @@
 cryptography==39.0.1
-ping3==4.0.8
 python-dotenv==1.0.1
 Requests==2.32.3
 simpleaudio==1.0.4
+tqdm==4.65.0