Issue with viewing Beamup logs in new Beamup Panel
Closed this issue · 2 comments
First of all, this new Beamup panel is awesome. 👍 for that. However, when I try to click on “View Log” button, the Firefox try to download as streaming media. I canceled it while downloading for more than 50MB. I think this is early development. As suggestion, nice to have that log viewable UI as other platform.
i just tested that panel you're talking about, and it's actually downloading the logs file. u should just let it the file download and open it with your favorite text editor for an in-depth view of the logs.
and it seems like whoever is hosting it on baby-beamup fixed the logs issue, so u can just run the beamup logs command to retrieve the latest logs only.
Please reopen this issue. I would like to view the logs as like in other SaaS products (heroku, Render & etc.)
For now, I have come up with my own script (thanks to ChatGPT as well) to view live logs with modifications. If anyone interested, they can use it. :D
Beamup Log viewer
import time
from datetime import datetime
import pytz
import requests
import tzlocal
from colorama import Fore, init
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
class BeamUpLogStreamer:
def __init__(
self,
beamup_project,
cookie_str,
refresh_interval=5,
timeout=10,
initial_lines=None,
follow=True,
convert_time=False,
proxy=None,
):
self.url = "https://baby-beamup.club/getLogs"
self.params = {"proj": beamup_project}
self.last_byte = 0
self.refresh_interval = refresh_interval
self.initial_fetch_done = False
self.previous_logs = ""
self.initial_lines = initial_lines
self.convert_time = convert_time
self.follow = follow
self.session = requests.Session()
self.timeout = timeout
self.session.mount(
"https://",
HTTPAdapter(
max_retries=Retry(
total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504]
)
),
)
self.session.proxies = proxy
self.headers = {
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"Accept-Encoding": "gzip, deflate, br",
"DNT": "1",
"Connection": "keep-alive",
"Referer": "https://baby-beamup.club/",
"Cookie": cookie_str,
"Upgrade-Insecure-Requests": "1",
"Sec-Fetch-Dest": "document",
"Sec-Fetch-Mode": "navigate",
"Sec-Fetch-Site": "same-origin",
"Sec-Fetch-User": "?1",
"Sec-GPC": "1",
"TE": "trailers",
}
init(autoreset=True)
@staticmethod
def colorize_log_line(line):
"""Applies color to log lines based on their severity."""
if "ERROR" in line:
return Fore.RED + line
elif "WARNING" in line:
return Fore.YELLOW + line
elif "INFO" in line:
return Fore.GREEN + line
else:
return line # Default, no color
@staticmethod
def convert_to_local_time(line):
"""Converts server timestamp in log line to local time."""
try:
timestamp_format = "%Y-%m-%dT%H:%M:%S.%fZ" # Adjust this format as needed
timestamp_end = line.find("Z")
if timestamp_end != -1:
# Truncate fractional seconds to 6 digits
original_timestamp_str = line[:timestamp_end]
fractional_seconds = original_timestamp_str.split(".")[1]
truncated_fractional = fractional_seconds[:6]
timestamp_str = (
original_timestamp_str.split(".")[0]
+ "."
+ truncated_fractional
+ "Z"
)
server_time = datetime.strptime(timestamp_str, timestamp_format)
server_time = server_time.replace(tzinfo=pytz.utc)
local_timezone = tzlocal.get_localzone() # Get the local timezone
local_time = server_time.astimezone(local_timezone)
local_timestamp_str = local_time.strftime(
"%Y-%m-%d %H:%M:%S %Z"
) # Local time format
return line.replace(original_timestamp_str, local_timestamp_str, 1)
except Exception as e:
print(f"Error converting time: {e}")
return line
def fetch_log_chunks(self):
"""Fetches log data in chunks and handles empty chunks."""
self.headers["Range"] = f"bytes={self.last_byte}-"
try:
with self.session.get(
self.url,
headers=self.headers,
params=self.params,
stream=True,
timeout=self.timeout,
) as response:
if response.status_code in [200, 206]:
for chunk in response.iter_content(
chunk_size=1024, decode_unicode=True
):
if chunk:
self.last_byte += len(chunk)
yield chunk
else:
# Encountered empty chunk, break the loop
break
else:
print(
f"Error: Unable to fetch logs, status code {response.status_code}"
)
except requests.RequestException as e:
print(f"Request error: {e}")
def process_and_print_logs(self):
"""Processes and prints logs as they are fetched."""
new_logs = ""
for log_chunk in self.fetch_log_chunks():
new_logs += log_chunk
if not self.initial_fetch_done:
# Handle the initial fetch
if self.initial_lines is not None:
lines = new_logs.splitlines()
initial_content = "\n".join(lines[-self.initial_lines :])
for line in initial_content.splitlines():
if self.convert_time:
line = self.convert_to_local_time(line)
print(self.colorize_log_line(line))
else:
for line in new_logs.splitlines():
if self.convert_time:
line = self.convert_to_local_time(line)
print(self.colorize_log_line(line))
self.initial_fetch_done = True
self.previous_logs = new_logs
else:
# For subsequent fetches, find and print only new content
new_content_start = len(self.previous_logs)
new_content = new_logs[new_content_start:]
for line in new_content.splitlines():
if self.convert_time:
line = self.convert_to_local_time(line)
colored_line = self.colorize_log_line(line)
print(colored_line)
if self.follow:
time.sleep(0.005) # Simple rate limiting for real-time following
self.previous_logs = new_logs
def start_streaming(self):
"""Continuously fetches new logs at specified intervals."""
try:
while True:
self.process_and_print_logs()
time.sleep(self.refresh_interval)
except KeyboardInterrupt:
print("Log streaming stopped by user.")
if __name__ == "__main__":
log_streamer = BeamUpLogStreamer(
beamup_project="882b9915d0fe-mediafusion",
cookie_str="",
follow=True,
initial_lines=10,
convert_time=True,
proxy={
"https": "http://127.0.0.1:1081",
},
)
log_streamer.start_streaming()
Usage
- install pip packages
pip install pytz requests tzlocal colorama
- update the
cookie_str="<token=xxx>"
you can get this by open web dev network tab and check the request headers when click on "See Logs" button. - update other parameter like beamup_project, proxy & etc.
- run the script