Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
134 changes: 126 additions & 8 deletions plugins/labs_panel/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,27 @@
import logging
import requests
import zipfile
import io
import os
import shutil

import fiftyone as fo
import fiftyone.operators as foo
import fiftyone.plugins as fop
import fiftyone.operators.types as types
import fiftyone.core.utils as fou

from .utils import (
list_labs_features,
add_version_info_to_features,
)

fom = fou.lazy_import("fiftyone.management")
logger = logging.getLogger(__name__)


from .utils import list_labs_features, add_version_info_to_features
def is_enterprise():
    """Whether this FiftyOne installation is an Enterprise (Teams) build.

    Detection relies on the ``TEAMS_VERSION`` constant, which only exists
    in Enterprise distributions of ``fiftyone.constants``.
    """
    sentinel = object()
    return getattr(fo.constants, "TEAMS_VERSION", sentinel) is not sentinel


class LabsPanel(foo.Panel):
Expand All @@ -27,12 +46,22 @@ def install_plugin(self, ctx):
plugins = ctx.panel.get_state("table")
for p in plugins:
if p["url"] == ctx.panel.state.plugin_url:
fop.download_plugin(
ctx.panel.state.plugin_url,
plugin_names=[p.get("name")],
overwrite=True,
)
pdef = fop.core.get_plugin(p["name"])
if is_enterprise():
zip_path = _download_plugin_dir(
p["url"], extract_to="/tmp/plugins"
)
fom.upload_plugin(
zip_path, overwrite=p.get("curr_version") is not None
)
pdef = fom.get_plugin_info(p["name"])

else:
fop.download_plugin(
ctx.panel.state.plugin_url,
plugin_names=[p.get("name")],
overwrite=True,
)
pdef = fop.core.get_plugin(p["name"])
stale_version = p.get("curr_version")
curr_version = pdef.version
if stale_version:
Expand All @@ -57,7 +86,10 @@ def uninstall_plugin(self, ctx):
if p["name"] == ctx.panel.state.selection:
curr_version = p.get("curr_version")
if curr_version:
fop.delete_plugin(ctx.panel.state.selection)
if is_enterprise():
fom.delete_plugin(ctx.panel.state.selection)
else:
fop.delete_plugin(ctx.panel.state.selection)
ctx.ops.notify(
f"{p['name']} uninstalled",
variant="success",
Expand Down Expand Up @@ -153,3 +185,89 @@ def render(self, ctx):

def register(p):
    """Plugin entry point: registers the ``LabsPanel`` panel with FiftyOne."""
    p.register(LabsPanel)


def _download_plugin_dir(
    plugin_url, plugin_branch="main", extract_to="/tmp", zip_name=None
):
    """Download a directory of a GitHub repo and repackage it as a local zip.

    Fetches the repo zipball via the GitHub API, extracts only the files
    under the directory named in ``plugin_url`` (or the whole repo when the
    URL has no ``/tree/<branch>/...`` suffix), and re-zips them with the
    directory prefix stripped.

    Args:
        plugin_url: GitHub URL to the plugin directory, e.g.
            "https://github.com/<owner>/<repo>/tree/<branch>/path/to/dir"
        plugin_branch: branch referenced in the GitHub URL
        extract_to: local directory in which to write the zip
        zip_name: basename (without ``.zip``) for the output archive;
            defaults to the directory name, or ``<owner>_<repo>``

    Returns:
        the path to the created zip file, or ``None`` on failure
    """
    # Split on the branch actually requested — the previous hard-coded
    # "/tree/main/" silently dropped the directory path for other branches
    url_parts = plugin_url.rstrip("/").split(f"/tree/{plugin_branch}/")
    owner_repo = url_parts[0].split("github.com/")[-1]
    dir_path = url_parts[1] if len(url_parts) > 1 else None
    zip_url = (
        f"https://api.github.com/repos/{owner_repo}/zipball/{plugin_branch}"
    )
    # Bounded timeout so a stalled GitHub request cannot hang the panel
    response = requests.get(zip_url, timeout=60)

    if response.status_code != 200:
        logger.info(f"Failed to download {zip_url}: {response.status_code}")
        return None

    # Temporary staging area for the extracted files
    temp_dir = os.path.join(extract_to, "temp")
    os.makedirs(temp_dir, exist_ok=True)

    if zip_name is None:
        if dir_path:
            zip_name = dir_path.split("/")[-1]
        else:
            zip_name = owner_repo.replace("/", "_")

    extracted_files = []
    with zipfile.ZipFile(io.BytesIO(response.content)) as zip_ref:
        for file_info in zip_ref.filelist:
            if file_info.is_dir():
                continue

            # GitHub zipballs prefix every entry with "<owner>-<repo>-<sha>/"
            file_parts = file_info.filename.split("/", 1)
            if len(file_parts) < 2:
                continue

            relative_path = file_parts[1]
            if dir_path:
                # Keep only files under the requested directory, with the
                # directory prefix stripped
                if not relative_path.startswith(dir_path + "/"):
                    continue

                content_path = relative_path[len(dir_path) + 1 :]
                if not content_path:
                    continue
            else:
                # No directory given: keep every file in the repo
                content_path = relative_path

            local_path = os.path.join(temp_dir, content_path)
            os.makedirs(os.path.dirname(local_path), exist_ok=True)

            with zip_ref.open(file_info) as source:
                with open(local_path, "wb") as target:
                    shutil.copyfileobj(source, target)

            extracted_files.append(local_path)

    if not extracted_files:
        shutil.rmtree(temp_dir)
        return None

    zip_path = os.path.join(extract_to, f"{zip_name}.zip")
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
        for file_path in extracted_files:
            zipf.write(file_path, os.path.relpath(file_path, temp_dir))

    # Clean up the staging directory
    shutil.rmtree(temp_dir)
    return zip_path
99 changes: 52 additions & 47 deletions plugins/labs_panel/utils.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
import logging
import markdown
import re
from bs4 import BeautifulSoup

import fiftyone as fo
from fiftyone.utils.github import GitHubRepository
import fiftyone.plugins.utils as fopu
import fiftyone.plugins.core as fopc
Expand All @@ -27,38 +26,63 @@ def list_labs_features(info=False):

repo = GitHubRepository("https://github.com/voxel51/labs")
content = repo.get_file("README.md").decode()
html_content = markdown.markdown(content, extensions=["tables"])
heading_tables = _read_tables_from_html(html_content)

# Find h2 headings (##) in the readme
h2_pattern = r"^## (.+)$"
headings = []
for match in re.finditer(h2_pattern, content, re.MULTILINE):
headings.append(
{"h2_heading": match.group(1), "h2_position": match.start()}
)

# Find tables in the readme
table_pattern = r"<table>.*?</table>"
tables = []
for match in re.finditer(table_pattern, content, re.DOTALL):
tables.append(
{"table_content": match.group(0), "table_position": match.start()}
)

plugins = []
for heading in heading_tables:
table = heading_tables[heading]
rows = table.find_all("tr")
for row in rows:
cols = row.find_all("td")
if len(cols) != 2:
continue

try:
name = cols[0].text.strip()
url = cols[0].find("a")["href"]
description = cols[1].text.strip()
plugins.append(
dict(
name=name,
url=url,
description=description,
category=heading,
)
)
except Exception as e:
logger.debug("Failed to parse plugin row: %s", e)
for i, heading in enumerate(headings):
heading_text = heading["h2_heading"]
heading_pos = heading["h2_position"]

next_heading_pos = (
headings[i + 1]["h2_position"]
if i + 1 < len(headings)
else len(content)
)

for table in tables:
if heading_pos < table["table_position"] < next_heading_pos:
soup = BeautifulSoup(table["table_content"], "html.parser")
table_elem = soup.find("table")

for row in table_elem.find_all("tr"):
cols = row.find_all(["td"])
if len(cols) != 2:
continue

try:
name = cols[0].text.strip()
url = cols[0].find("a")["href"]
description = cols[1].text.strip()
plugins.append(
dict(
name=name,
url=url,
description=description,
category=heading_text,
)
)
except Exception as e:
logger.debug("Failed to parse plugin row: %s", e)

if not info:
return plugins

tasks = [(p["url"], None) for p in plugins]
return fopu.get_plugin_info(tasks)
return [fopu.get_plugin_info(p["url"], None) for p in plugins]


def add_version_info_to_features(lab_features):
Expand All @@ -85,22 +109,3 @@ def add_version_info_to_features(lab_features):
p["curr_version"] = plugin_def.version

return lab_features


def _read_tables_from_html(html_content):
    """Map each ``<h2>`` heading's text to the table that follows it.

    A table is attributed to a heading only when it appears in the document
    before the next heading of any level; headings with no such table are
    omitted from the result.
    """
    soup = BeautifulSoup(html_content, "html.parser")

    heading_tables = {}
    for heading in soup.find_all("h2"):
        table = heading.find_next("table")
        if not table:
            continue

        next_heading = heading.find_next(["h1", "h2", "h3", "h4", "h5", "h6"])
        if not next_heading or table.sourceline < next_heading.sourceline:
            heading_tables[heading.get_text()] = table

    return heading_tables