black repo
recalcitrantsupplant committed Jul 2, 2024
1 parent 785abea commit 9439a42
Showing 18 changed files with 233 additions and 195 deletions.
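The commit message indicates the repository was reformatted with the Black code formatter, so the changes below are mechanical: line wrapping at 88 characters, trailing commas, and double-quoted strings. As a minimal sketch of the transformation involved (assuming Black's default Mode and that the black package is installed; the sample line mirrors the pre-change code in function_app.py and is not part of the commit):

import black  # requires the "black" package

# A single unformatted line, resembling the pre-change code in function_app.py below.
src = "req = HttpRequest(\"GET\", \"/v\", headers={}, body=b'')\n"

# format_str applies Black's style: quote normalization, 88-character lines, etc.
formatted = black.format_str(src, mode=black.Mode())
print(formatted)  # req = HttpRequest("GET", "/v", headers={}, body=b"")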
7 changes: 5 additions & 2 deletions function_app.py
@@ -9,7 +9,9 @@


if assemble_app is None:
raise RuntimeError("Cannot import prez in the Azure function app. Check requirements.py and pyproject.toml.")
raise RuntimeError(
"Cannot import prez in the Azure function app. Check requirements.py and pyproject.toml."
)


# This is the base URL path that Prez routes will stem from
@@ -32,7 +34,8 @@
if __name__ == "__main__":
from azure.functions import HttpRequest, Context
import asyncio
req = HttpRequest("GET", "/v", headers={}, body=b'')

req = HttpRequest("GET", "/v", headers={}, body=b"")
context = dict()
loop = asyncio.get_event_loop()

9 changes: 6 additions & 3 deletions prez/app.py
@@ -16,12 +16,15 @@
load_local_data_to_oxigraph,
get_oxrdflib_store,
get_system_store,
load_system_data_to_oxigraph, load_annotations_data_to_oxigraph, get_annotations_store,
load_system_data_to_oxigraph,
load_annotations_data_to_oxigraph,
get_annotations_store,
)
from prez.exceptions.model_exceptions import (
ClassNotFoundException,
URINotFoundException,
NoProfilesException, InvalidSPARQLQueryException,
NoProfilesException,
InvalidSPARQLQueryException,
)
from prez.repositories import RemoteSparqlRepo, PyoxigraphRepo, OxrdflibRepo
from prez.routers.identifier import router as identifier_router
@@ -161,7 +164,7 @@ def assemble_app(
ClassNotFoundException: catch_class_not_found_exception,
URINotFoundException: catch_uri_not_found_exception,
NoProfilesException: catch_no_profiles_exception,
InvalidSPARQLQueryException: catch_invalid_sparql_query
InvalidSPARQLQueryException: catch_invalid_sparql_query,
},
**kwargs
)
5 changes: 1 addition & 4 deletions prez/config.py
@@ -52,10 +52,7 @@ class Settings(BaseSettings):
SDO.description,
]
provenance_predicates: Optional[List[URIRef]] = [DCTERMS.provenance]
other_predicates: Optional[List[URIRef]] = [
SDO.color,
REG.status
]
other_predicates: Optional[List[URIRef]] = [SDO.color, REG.status]
sparql_repo_type: str = "remote"
sparql_timeout: int = 30
log_level: str = "INFO"
2 changes: 1 addition & 1 deletion prez/exceptions/model_exceptions.py
@@ -44,4 +44,4 @@ class InvalidSPARQLQueryException(Exception):

def __init__(self, error: str):
self.message = f"Invalid SPARQL query: {error}"
super().__init__(self.message)
super().__init__(self.message)
5 changes: 1 addition & 4 deletions prez/routers/api_extras_examples.py
@@ -5,10 +5,7 @@
responses = json.loads(responses_json.read_text())
cql_json_examples_dir = Path(__file__).parent.parent / "examples/cql"
cql_examples = {
file.stem: {
"summary": file.stem,
"value": json.loads(file.read_text())
}
file.stem: {"summary": file.stem, "value": json.loads(file.read_text())}
for file in cql_json_examples_dir.glob("*.json")
}

2 changes: 1 addition & 1 deletion prez/routers/identifier.py
@@ -4,8 +4,8 @@
from rdflib.term import _is_valid_uri

from prez.dependencies import get_data_repo
from prez.services.query_generation.identifier import get_foaf_homepage_query
from prez.services.curie_functions import get_uri_for_curie_id, get_curie_id_for_uri
from prez.services.query_generation.identifier import get_foaf_homepage_query

router = APIRouter(tags=["Identifier Resolution"])

102 changes: 43 additions & 59 deletions prez/routers/ogc_router.py
@@ -10,7 +10,8 @@
get_negotiated_pmts,
get_profile_nodeshape,
get_endpoint_structure,
generate_concept_hierarchy_query, cql_post_parser_dependency,
generate_concept_hierarchy_query,
cql_post_parser_dependency,
)
from prez.models.query_params import QueryParams
from prez.reference_data.prez_ns import EP, ONT, OGCE
@@ -26,71 +27,63 @@
router = APIRouter(tags=["ogcprez"])


@router.get(
path="/search",
summary="Search",
name=OGCE["search"],
responses=responses
)
@router.get(path="/search", summary="Search", name=OGCE["search"], responses=responses)
@router.get(
"/profiles",
summary="List Profiles",
name=EP["system/profile-listing"],
responses=responses
responses=responses,
)
@router.get(
path="/cql",
summary="CQL GET endpoint",
name=OGCE["cql-get"],
responses=responses
path="/cql", summary="CQL GET endpoint", name=OGCE["cql-get"], responses=responses
)
@router.get(
"/catalogs",
summary="Catalog Listing",
name=OGCE["catalog-listing"],
responses=responses
responses=responses,
)
@router.get(
"/catalogs/{catalogId}/collections",
summary="Collection Listing",
name=OGCE["collection-listing"],
openapi_extra=openapi_extras.get("collection-listing"),
responses=responses
responses=responses,
)
@router.get(
"/catalogs/{catalogId}/collections/{collectionId}/items",
summary="Item Listing",
name=OGCE["item-listing"],
openapi_extra=openapi_extras.get("item-listing"),
responses=responses
responses=responses,
)
@router.get(
"/concept-hierarchy/{parent_curie}/top-concepts",
summary="Top Concepts",
name=OGCE["top-concepts"],
openapi_extra=openapi_extras.get("top-concepts"),
responses=responses
responses=responses,
)
@router.get(
"/concept-hierarchy/{parent_curie}/narrowers",
summary="Narrowers",
name=OGCE["narrowers"],
openapi_extra=openapi_extras.get("narrowers"),
responses=responses
responses=responses,
)
async def listings(
query_params: QueryParams = Depends(),
endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes),
pmts: NegotiatedPMTs = Depends(get_negotiated_pmts),
endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure),
profile_nodeshape: NodeShape = Depends(get_profile_nodeshape),
cql_parser: CQLParser = Depends(cql_get_parser_dependency),
search_query: ConstructQuery = Depends(generate_search_query),
concept_hierarchy_query: ConceptHierarchyQuery = Depends(
generate_concept_hierarchy_query
),
data_repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
query_params: QueryParams = Depends(),
endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes),
pmts: NegotiatedPMTs = Depends(get_negotiated_pmts),
endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure),
profile_nodeshape: NodeShape = Depends(get_profile_nodeshape),
cql_parser: CQLParser = Depends(cql_get_parser_dependency),
search_query: ConstructQuery = Depends(generate_search_query),
concept_hierarchy_query: ConceptHierarchyQuery = Depends(
generate_concept_hierarchy_query
),
data_repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
):
return await listing_function(
data_repo=data_repo,
Expand All @@ -112,26 +105,20 @@ async def listings(
summary="CQL POST endpoint",
name=OGCE["cql-post"],
openapi_extra={
"requestBody": {
"content": {
"application/json": {
"examples": cql_examples
}
}
}
"requestBody": {"content": {"application/json": {"examples": cql_examples}}}
},
responses=responses
responses=responses,
)
async def cql_post_listings(
query_params: QueryParams = Depends(),
endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes),
pmts: NegotiatedPMTs = Depends(get_negotiated_pmts),
endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure),
profile_nodeshape: NodeShape = Depends(get_profile_nodeshape),
cql_parser: CQLParser = Depends(cql_post_parser_dependency),
search_query: ConstructQuery = Depends(generate_search_query),
data_repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
query_params: QueryParams = Depends(),
endpoint_nodeshape: NodeShape = Depends(get_endpoint_nodeshapes),
pmts: NegotiatedPMTs = Depends(get_negotiated_pmts),
endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure),
profile_nodeshape: NodeShape = Depends(get_profile_nodeshape),
cql_parser: CQLParser = Depends(cql_post_parser_dependency),
search_query: ConstructQuery = Depends(generate_search_query),
data_repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
):
return await listing_function(
data_repo=data_repo,
@@ -160,45 +147,42 @@ async def cql_post_listings(


@router.get(
path="/object",
summary="Object",
name=EP["system/object"],
responses=responses
path="/object", summary="Object", name=EP["system/object"], responses=responses
)
@router.get(
path="/profiles/{profile_curie}",
summary="Profile",
name=EP["system/profile-object"],
openapi_extra=openapi_extras.get("profile-object"),
responses=responses
responses=responses,
)
@router.get(
path="/catalogs/{catalogId}",
summary="Catalog Object",
name=OGCE["catalog-object"],
openapi_extra=openapi_extras.get("catalog-object"),
responses=responses
responses=responses,
)
@router.get(
path="/catalogs/{catalogId}/collections/{collectionId}",
summary="Collection Object",
name=OGCE["collection-object"],
openapi_extra=openapi_extras.get("collection-object"),
responses=responses
responses=responses,
)
@router.get(
path="/catalogs/{catalogId}/collections/{collectionId}/items/{itemId}",
summary="Item Object",
name=OGCE["item-object"],
openapi_extra=openapi_extras.get("item-object"),
responses=responses
responses=responses,
)
async def objects(
pmts: NegotiatedPMTs = Depends(get_negotiated_pmts),
endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure),
profile_nodeshape: NodeShape = Depends(get_profile_nodeshape),
data_repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
pmts: NegotiatedPMTs = Depends(get_negotiated_pmts),
endpoint_structure: tuple[str, ...] = Depends(get_endpoint_structure),
profile_nodeshape: NodeShape = Depends(get_profile_nodeshape),
data_repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
):
return await object_function(
data_repo=data_repo,
51 changes: 31 additions & 20 deletions prez/routers/sparql.py
@@ -9,7 +9,7 @@
from starlette.requests import Request
from starlette.responses import StreamingResponse

from prez.dependencies import get_data_repo, get_system_repo, get_negotiated_pmts
from prez.dependencies import get_data_repo, get_system_repo
from prez.renderers.renderer import return_annotated_rdf
from prez.repositories import Repo
from prez.services.connegp_service import NegotiatedPMTs
@@ -21,29 +21,35 @@

@router.post("/sparql")
async def sparql_post_passthrough(
# To maintain compatibility with the other SPARQL endpoints,
# /sparql POST endpoint is not a JSON API, it uses
# values encoded with x-www-form-urlencoded
query: Annotated[str, Form()],
# Pydantic validation prevents update queries (the Form would need to be "update")
request: Request,
repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
# To maintain compatibility with the other SPARQL endpoints,
# /sparql POST endpoint is not a JSON API, it uses
# values encoded with x-www-form-urlencoded
query: Annotated[str, Form()],
# Pydantic validation prevents update queries (the Form would need to be "update")
request: Request,
repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
):
return await sparql_endpoint_handler(query, request, repo, system_repo, method="POST")
return await sparql_endpoint_handler(
query, request, repo, system_repo, method="POST"
)


@router.get("/sparql")
async def sparql_get_passthrough(
query: str,
request: Request,
repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
query: str,
request: Request,
repo: Repo = Depends(get_data_repo),
system_repo: Repo = Depends(get_system_repo),
):
return await sparql_endpoint_handler(query, request, repo, system_repo, method="GET")
return await sparql_endpoint_handler(
query, request, repo, system_repo, method="GET"
)


async def sparql_endpoint_handler(query: str, request: Request, repo: Repo, system_repo, method="GET"):
async def sparql_endpoint_handler(
query: str, request: Request, repo: Repo, system_repo, method="GET"
):
pmts = NegotiatedPMTs(
**{
"headers": request.headers,
@@ -72,13 +78,14 @@ async def sparql_endpoint_handler(query: str, request: Request, repo: Repo, syst
media_type=non_anot_mediatype,
headers=pmts.generate_response_headers(),
)
query_result: 'httpx.Response' = await repo.sparql(query, request.headers.raw, method=method)
query_result: "httpx.Response" = await repo.sparql(
query, request.headers.raw, method=method
)
if isinstance(query_result, dict):
return JSONResponse(content=query_result)
elif isinstance(query_result, Graph):
return Response(
content=query_result.serialize(format="text/turtle"),
status_code=200
content=query_result.serialize(format="text/turtle"), status_code=200
)

dispositions = query_result.headers.get_list("Content-Disposition")
@@ -92,7 +99,11 @@ async def sparql_endpoint_handler(query: str, request: Request, repo: Repo, syst
# remove transfer-encoding chunked, disposition=attachment, and content-length
headers = dict()
for k, v in query_result.headers.items():
if k.lower() not in ("transfer-encoding", "content-disposition", "content-length"):
if k.lower() not in (
"transfer-encoding",
"content-disposition",
"content-length",
):
headers[k] = v
content = await query_result.aread()
await query_result.aclose()
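The comment in the sparql.py hunk above notes that, to stay compatible with the other SPARQL endpoints, the /sparql POST endpoint accepts its query as x-www-form-urlencoded form data rather than a JSON body. A minimal client sketch under that assumption (the base URL, query string, and Accept header are illustrative, not taken from this commit):

import httpx

# Hypothetical deployment address; substitute the real Prez base URL.
BASE_URL = "http://localhost:8000"

# The POST body is form-encoded with a "query" field, matching the Form()
# parameter of sparql_post_passthrough (not a JSON body).
response = httpx.post(
    f"{BASE_URL}/sparql",
    data={"query": "SELECT * WHERE { ?s ?p ?o } LIMIT 10"},
    headers={"Accept": "application/sparql-results+json"},
)
print(response.status_code, response.headers.get("content-type"))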
2 changes: 1 addition & 1 deletion prez/services/curie_functions.py
@@ -42,7 +42,7 @@ def generate_new_prefix(uri):
else:
ns = f'{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path.rsplit("/", 1)[0]}/'

split_prefix_path = ns[:-1].rsplit('/', 1)
split_prefix_path = ns[:-1].rsplit("/", 1)
if len(split_prefix_path) > 1:
to_generate_prefix_from = split_prefix_path[-1].lower()
# attempt to just use the last part of the path prior to the fragment or "identifier"
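For context on the curie_functions.py hunk above: ns[:-1].rsplit("/", 1) drops the namespace's trailing slash and splits off the last path segment, which is then lower-cased as the candidate prefix. A small illustration with a made-up namespace (not from the codebase):

# Hypothetical namespace, as generate_new_prefix might derive it from a parsed URI.
ns = "https://example.org/vocab/colour/"

split_prefix_path = ns[:-1].rsplit("/", 1)
# -> ['https://example.org/vocab', 'colour']

to_generate_prefix_from = split_prefix_path[-1].lower()
print(to_generate_prefix_from)  # colour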
