Commit ed75b2d

reapply linting after rebase
1 parent 18861ea · commit ed75b2d

25 files changed: +1125 −979 lines
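
The hunks below are formatter churn rather than behavioural change: single-quoted strings become double-quoted, long signatures are reflowed (collapsed onto one line where they fit, otherwise exploded one parameter per line with a trailing comma), duplicate imports are dropped, and docstrings lose their inner padding. A schematic sketch of the convention, using hypothetical load_before/load_after functions (the formatter invocation and configuration are not shown in this excerpt, though the style matches Black/Ruff defaults with an extended line length):

import logging
from typing import Optional

logger = logging.getLogger(__name__)


# Style before the linting pass: continuation lines aligned under the opening
# parenthesis, single-quoted f-strings.
def load_before(source: str,
                base_dir: Optional[str] = None,
                encoding: Optional[str] = None) -> dict:
    logger.debug(f'LOADING={source}')
    return {}


# Style after the linting pass: one parameter per line, trailing comma,
# double quotes throughout.
def load_after(
    source: str,
    base_dir: Optional[str] = None,
    encoding: Optional[str] = None,
) -> dict:
    logger.debug(f"LOADING={source}")
    return {}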

linkml_runtime/dumpers/delimited_file_dumper.py

Lines changed: 0 additions & 6 deletions
@@ -5,19 +5,13 @@

 from json_flattener import GlobalConfig, flatten_to_csv
 from pydantic import BaseModel
-from json_flattener import GlobalConfig

 from linkml_runtime.dumpers.dumper_root import Dumper
 from linkml_runtime.dumpers.json_dumper import JSONDumper
 from linkml_runtime.linkml_model.meta import SchemaDefinition, SlotDefinitionName
 from linkml_runtime.utils.csvutils import get_configmap
 from linkml_runtime.utils.schemaview import SchemaView
 from linkml_runtime.utils.yamlutils import YAMLRoot
-from linkml_runtime.linkml_model.meta import SlotDefinitionName, SchemaDefinition
-from linkml_runtime.utils.schemaview import SchemaView
-
-from linkml_runtime.utils.csvutils import get_configmap
-from json_flattener import flatten_to_csv


 class DelimitedFileDumper(Dumper, ABC):

linkml_runtime/dumpers/json_dumper.py

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 import json
-from datetime import datetime, date
+from datetime import date, datetime
 from decimal import Decimal
 from typing import Union

linkml_runtime/dumpers/rdflib_dumper.py

Lines changed: 15 additions & 14 deletions
@@ -25,6 +25,7 @@ class RDFLibDumper(Dumper):
     This requires a SchemaView object

     """
+
     def as_rdf_graph(
         self,
         element: Union[BaseModel, YAMLRoot],
@@ -44,7 +45,7 @@ def as_rdf_graph(
         if isinstance(prefix_map, Converter):
             # TODO replace with `prefix_map = prefix_map.bimap` after making minimum requirement on python 3.8
             prefix_map = {record.prefix: record.uri_prefix for record in prefix_map.records}
-        logger.debug(f'PREFIXMAP={prefix_map}')
+        logger.debug(f"PREFIXMAP={prefix_map}")
         namespaces = schemaview.namespaces()
         if prefix_map:
             for k, v in prefix_map.items():
@@ -68,7 +69,9 @@ def as_rdf_graph(
         self.inject_triples(element, schemaview, g)
         return g

-    def inject_triples(self, element: Any, schemaview: SchemaView, graph: Graph, target_type: ElementName = None) -> Node:
+    def inject_triples(
+        self, element: Any, schemaview: SchemaView, graph: Graph, target_type: ElementName = None
+    ) -> Node:
         """
         Inject triples from conversion of element into a Graph

@@ -80,7 +83,7 @@ def inject_triples(self, element: Any, schemaview: SchemaView, graph: Graph, tar
         """
         namespaces = schemaview.namespaces()
         slot_name_map = schemaview.slot_name_mappings()
-        logger.debug(f'CONVERT: {element} // {type(element)} // {target_type}')
+        logger.debug(f"CONVERT: {element} // {type(element)} // {target_type}")
         if target_type in schemaview.all_enums():
             if isinstance(element, PermissibleValueText):
                 e = schemaview.get_enum(target_type)
@@ -96,22 +99,22 @@ def inject_triples(self, element: Any, schemaview: SchemaView, graph: Graph, tar
             t = schemaview.get_type(target_type)
             dt_uri = t.uri
             if dt_uri:
-                if dt_uri == 'rdfs:Resource':
+                if dt_uri == "rdfs:Resource":
                     return URIRef(schemaview.expand_curie(element))
-                elif dt_uri == 'xsd:string':
+                elif dt_uri == "xsd:string":
                     return Literal(element)
                 else:
                     if "xsd" not in namespaces:
                         namespaces["xsd"] = XSD
                     return Literal(element, datatype=namespaces.uri_for(dt_uri))
             else:
-                logger.warning(f'No datatype specified for : {t.name}, using plain Literal')
+                logger.warning(f"No datatype specified for : {t.name}, using plain Literal")
                 return Literal(element)
-        element_vars = {k: v for k, v in vars(element).items() if not k.startswith('_')}
+        element_vars = {k: v for k, v in vars(element).items() if not k.startswith("_")}
         if len(element_vars) == 0:
             id_slot = schemaview.get_identifier_slot(target_type)
             return self._as_uri(element, id_slot, schemaview)
-            #return URIRef(schemaview.expand_curie(str(element)))
+            # return URIRef(schemaview.expand_curie(str(element)))
         element_type = type(element)
         cn = element_type.class_name
         id_slot = schemaview.get_identifier_slot(cn)
@@ -134,7 +137,7 @@ def inject_triples(self, element: Any, schemaview: SchemaView, graph: Graph, tar
             if k in slot_name_map:
                 k = slot_name_map[k].name
             else:
-                logger.error(f'Slot {k} not in name map')
+                logger.error(f"Slot {k} not in name map")
             slot = schemaview.induced_slot(k, cn)
             if not slot.identifier:
                 slot_uri = URIRef(schemaview.get_uri(slot, expand=True))
@@ -151,7 +154,7 @@ def dump(
         element: Union[BaseModel, YAMLRoot],
         to_file: str,
         schemaview: SchemaView = None,
-        fmt: str = 'turtle',
+        fmt: str = "turtle",
         prefix_map: Union[dict[str, str], Converter, None] = None,
         **args,
     ) -> None:
@@ -171,7 +174,7 @@ def dumps(
         self,
         element: Union[BaseModel, YAMLRoot],
         schemaview: SchemaView = None,
-        fmt: Optional[str] = 'turtle',
+        fmt: Optional[str] = "turtle",
         prefix_map: Union[dict[str, str], Converter, None] = None,
     ) -> str:
         """
@@ -183,12 +186,10 @@ def dumps(
         :param prefix_map:
         :return: serialization of rdflib Graph containing element
         """
-        return self.as_rdf_graph(element, schemaview, prefix_map=prefix_map).\
-            serialize(format=fmt)
+        return self.as_rdf_graph(element, schemaview, prefix_map=prefix_map).serialize(format=fmt)

     def _as_uri(self, element_id: str, id_slot: Optional[SlotDefinition], schemaview: SchemaView) -> URIRef:
         if id_slot and schemaview.is_slot_percent_encoded(id_slot):
             return URIRef(urllib.parse.quote(element_id))
         else:
             return schemaview.namespaces().uri_for(element_id)
-
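
The reformatted dump/dumps signatures above keep the same calling convention; only quoting and line wrapping changed. A minimal usage sketch, assuming a hypothetical personinfo.yaml schema and generated Person class (neither is part of this commit):

from linkml_runtime.dumpers.rdflib_dumper import RDFLibDumper
from linkml_runtime.utils.schemaview import SchemaView

from personinfo import Person  # hypothetical generated class, not in this commit

sv = SchemaView("personinfo.yaml")  # hypothetical schema file
alice = Person(id="P:001", name="Alice")

# fmt defaults to "turtle"; prefix_map may be a plain dict or a curies Converter
ttl = RDFLibDumper().dumps(alice, schemaview=sv, prefix_map={"P": "https://example.org/person/"})
print(ttl)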

linkml_runtime/linkml_model/linkml_files.py

Lines changed: 52 additions & 42 deletions
@@ -24,7 +24,8 @@ def _generate_next_value_(name, start, count, last_values):


 class Source(_AutoName):
-    """ LinkML package source name """
+    """LinkML package source name"""
+
     META = auto()
     TYPES = auto()
     MAPPINGS = auto()
@@ -33,7 +34,8 @@ class Source(_AutoName):


 class Format(_AutoName):
-    """ LinkML package formats """
+    """LinkML package formats"""
+
     EXCEL = auto()
     GRAPHQL = auto()
     JSON = auto()
@@ -55,48 +57,51 @@ class Format(_AutoName):
     SQLSCHEMA = auto()
     YAML = auto()

+
 @dataclass
 class FormatPath:
     path: str
     extension: str

-    def model_path(self, model:str) -> Path:
+    def model_path(self, model: str) -> Path:
         return (Path(self.path) / model).with_suffix(self.extension)

+
 class _Path:
-    """ LinkML Relative paths"""
-    EXCEL = FormatPath("excel","xlsx" )
-    GRAPHQL = FormatPath("graphql","graphql" )
-    JSON = FormatPath("json","json" )
-    JSONLD = FormatPath("jsonld","context.jsonld" )
-    JSON_SCHEMA = FormatPath("jsonschema", "schema.json" )
-    NATIVE_JSONLD = FormatPath("jsonld", "context.jsonld" )
-    NATIVE_RDF = FormatPath("rdf","ttl" )
-    NATIVE_SHEXC = FormatPath("shex","shex" )
-    NATIVE_SHEXJ = FormatPath("shex","shexj" )
-    OWL = FormatPath("owl","owl.ttl" )
-    PREFIXMAP = FormatPath('prefixmap','yaml' )
-    PROTOBUF = FormatPath("protobuf","proto" )
-    PYTHON = FormatPath("","py" )
-    RDF = FormatPath("rdf","ttl" )
-    SHACL = FormatPath("shacl","shacl.ttl" )
-    SHEXC = FormatPath("shex","shex" )
-    SHEXJ = FormatPath("shex","shexj" )
-    SQLDDL = FormatPath("sqlddl","sql" )
-    SQLSCHEMA = FormatPath("sqlschema","sql" )
-    YAML = FormatPath((Path("model") / "schema").as_posix(),"yaml" )
+    """LinkML Relative paths"""
+
+    EXCEL = FormatPath("excel", "xlsx")
+    GRAPHQL = FormatPath("graphql", "graphql")
+    JSON = FormatPath("json", "json")
+    JSONLD = FormatPath("jsonld", "context.jsonld")
+    JSON_SCHEMA = FormatPath("jsonschema", "schema.json")
+    NATIVE_JSONLD = FormatPath("jsonld", "context.jsonld")
+    NATIVE_RDF = FormatPath("rdf", "ttl")
+    NATIVE_SHEXC = FormatPath("shex", "shex")
+    NATIVE_SHEXJ = FormatPath("shex", "shexj")
+    OWL = FormatPath("owl", "owl.ttl")
+    PREFIXMAP = FormatPath("prefixmap", "yaml")
+    PROTOBUF = FormatPath("protobuf", "proto")
+    PYTHON = FormatPath("", "py")
+    RDF = FormatPath("rdf", "ttl")
+    SHACL = FormatPath("shacl", "shacl.ttl")
+    SHEXC = FormatPath("shex", "shex")
+    SHEXJ = FormatPath("shex", "shexj")
+    SQLDDL = FormatPath("sqlddl", "sql")
+    SQLSCHEMA = FormatPath("sqlschema", "sql")
+    YAML = FormatPath((Path("model") / "schema").as_posix(), "yaml")

     @classmethod
     def items(cls) -> dict[str, FormatPath]:
-        return {k:v for k,v in cls.__dict__.items() if not k.startswith('_')}
+        return {k: v for k, v in cls.__dict__.items() if not k.startswith("_")}

     @classmethod
-    def get(cls, item:Union[str,Format]) -> FormatPath:
+    def get(cls, item: Union[str, Format]) -> FormatPath:
         if isinstance(item, Format):
             item = item.name.upper()
         return getattr(cls, item)

-    def __class_getitem__(cls, item:str) -> FormatPath:
+    def __class_getitem__(cls, item: str) -> FormatPath:
         return getattr(cls, item)


@@ -108,14 +113,15 @@ def __class_getitem__(cls, item:str) -> FormatPath:
     Format.PROTOBUF,
     Format.SHACL,
     Format.SQLDDL,
-    Format.SQLSCHEMA
+    Format.SQLSCHEMA,
 )


 class ReleaseTag(_AutoName):
-    """ Release tags
+    """Release tags
     LATEST - the absolute latest in the supplied branch
-    CURRENT - the latest _released_ version in the supplied branch """
+    CURRENT - the latest _released_ version in the supplied branch"""
+
     LATEST = auto()
     CURRENT = auto()

@@ -137,12 +143,12 @@ def _build_path(source: Source, fmt: Format) -> PathParts:
 def _build_loc(base: str, source: Source, fmt: Format) -> str:
     """A github location"""
     # urls are always forward slash separated, so hardcoding is appropriate here
-    path = '/'.join(_build_path(source, fmt))
-    return urljoin(base, path).replace('blob/', '')
+    path = "/".join(_build_path(source, fmt))
+    return urljoin(base, path).replace("blob/", "")


 def URL_FOR(source: Source, fmt: Format) -> str:
-    """ Return the URL to retrieve source in format """
+    """Return the URL to retrieve source in format"""
     fmt_path: FormatPath = _Path.get(fmt.name)
     return f"{LINKML_URL_BASE}{source.value}.{fmt_path.extension}"

@@ -152,14 +158,16 @@ def LOCAL_PATH_FOR(source: Source, fmt: Format) -> str:


 def GITHUB_IO_PATH_FOR(source: Source, fmt: Format, version="latest") -> str:
-    path = '/'.join([version, 'linkml_model', *_build_path(source, fmt)])
+    path = "/".join([version, "linkml_model", *_build_path(source, fmt)])
     return urljoin(GITHUB_IO_BASE, path)


-def GITHUB_PATH_FOR(source: Source,
-                    fmt: Format,
-                    release: Optional[Union[ReleaseTag, str]] = ReleaseTag.CURRENT,
-                    branch: Optional[str] = "main") -> str:
+def GITHUB_PATH_FOR(
+    source: Source,
+    fmt: Format,
+    release: Optional[Union[ReleaseTag, str]] = ReleaseTag.CURRENT,
+    branch: Optional[str] = "main",
+) -> str:
     def do_request(url) -> object:
         resp = requests.get(url)
         if resp.ok:
@@ -169,7 +177,7 @@ def do_request(url) -> object:
     def tag_to_commit(tag: str) -> str:
         tags = do_request(f"{GITHUB_API_BASE}tags?per_page=100")
         for tagent in tags:
-            if tagent['name'] == tag:
+            if tagent["name"] == tag:
                 return _build_loc(f"{GITHUB_BASE}blob/{tagent['commit']['sha']}/", source, fmt)
         raise ValueError(f"Tag: {tag} not found!")

@@ -178,13 +186,13 @@ def tag_to_commit(tag: str) -> str:

     # Return the absolute latest entry for branch
     if release is ReleaseTag.LATEST or (release is ReleaseTag.CURRENT and branch != "main"):
-        path = '/'.join([branch, 'linkml_model', *_build_path(source, fmt)])
+        path = "/".join([branch, "linkml_model", *_build_path(source, fmt)])
         return urljoin(GITHUB_BASE, path)

     # Return the latest published version
     elif release is ReleaseTag.CURRENT:
         release = do_request(f"{GITHUB_API_BASE}releases/latest")
-        return tag_to_commit(release['tag_name'])
+        return tag_to_commit(release["tag_name"])

     # Return a specific tag
     else:
@@ -212,7 +220,9 @@ def url(self) -> str:
     def file(self) -> str:
         return LOCAL_PATH_FOR(self._model, self._format)

-    def github_loc(self, tag: Optional[str] = None, branch: Optional[str] = None, release: ReleaseTag = None) -> str:
+    def github_loc(
+        self, tag: Optional[str] = None, branch: Optional[str] = None, release: ReleaseTag = None
+    ) -> str:
         if not tag and not branch and not release:
             return GITHUB_IO_PATH_FOR(self._model, self._format)
         if tag:
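
The helpers touched above form this module's lookup surface for model artifacts. A short sketch of how they compose (the module path is taken from the file name in this diff; printed values depend on the installed linkml model release):

from linkml_runtime.linkml_model.linkml_files import (
    _Path,
    Format,
    ReleaseTag,
    Source,
    GITHUB_PATH_FOR,
    URL_FOR,
)

# FormatPath records the relative directory and file extension for each artifact
fp = _Path.get(Format.JSON_SCHEMA)
print(fp.path, fp.extension)  # jsonschema schema.json

# URL_FOR builds the canonical download URL from LINKML_URL_BASE;
# GITHUB_PATH_FOR with ReleaseTag.LATEST builds a raw GitHub URL without
# calling the GitHub API (CURRENT would issue a requests.get to resolve the tag)
print(URL_FOR(Source.META, Format.YAML))
print(GITHUB_PATH_FOR(Source.META, Format.YAML, release=ReleaseTag.LATEST, branch="main"))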

linkml_runtime/loaders/json_loader.py

Lines changed: 23 additions & 17 deletions
@@ -1,39 +1,45 @@
 import json
 import logging
 from pathlib import Path
-from typing import Union, TextIO, Optional
+from typing import Optional, TextIO, Union

 from hbreader import FileInfo
+from pydantic import BaseModel

 from linkml_runtime.loaders.loader_root import Loader
 from linkml_runtime.utils.yamlutils import YAMLRoot
-from pydantic import BaseModel

 logger = logging.getLogger(__name__)


 class JSONLoader(Loader):

-    def load_as_dict(self,
-                     source: Union[str, dict, TextIO],
-                     *,
-                     base_dir: Optional[str] = None,
-                     metadata: Optional[FileInfo] = None) -> Union[dict, list[dict]]:
-        data = self._read_source(source, base_dir=base_dir, metadata=metadata, accept_header="application/ld+json, application/json, text/json")
+    def load_as_dict(
+        self, source: Union[str, dict, TextIO], *, base_dir: Optional[str] = None, metadata: Optional[FileInfo] = None
+    ) -> Union[dict, list[dict]]:
+        data = self._read_source(
+            source,
+            base_dir=base_dir,
+            metadata=metadata,
+            accept_header="application/ld+json, application/json, text/json",
+        )
         data_as_dict = json.loads(data) if isinstance(data, str) else data
         return self.json_clean(data_as_dict)

-    def load_any(self,
-                 source: Union[str, dict, TextIO, Path],
-                 target_class: type[Union[BaseModel, YAMLRoot]],
-                 *,
-                 base_dir: Optional[str] = None,
-                 metadata: Optional[FileInfo] = None,
-                 **_) -> Union[BaseModel, YAMLRoot, list[BaseModel], list[YAMLRoot]]:
+    def load_any(
+        self,
+        source: Union[str, dict, TextIO, Path],
+        target_class: type[Union[BaseModel, YAMLRoot]],
+        *,
+        base_dir: Optional[str] = None,
+        metadata: Optional[FileInfo] = None,
+        **_,
+    ) -> Union[BaseModel, YAMLRoot, list[BaseModel], list[YAMLRoot]]:
         """
         Load the JSON in source into the python target_class structure

-        :param source: JSON data source. Can be a URL, a file name, a JSON string, a resolveable filepath or an existing graph
+        :param source: JSON data source. Can be a URL, a file name, a JSON string, a resolveable filepath,
+            or an existing graph
         :param target_class: LinkML class to load the JSON into
         :param base_dir: Base directory that can be used if file name or URL. This is copied into metadata if present
         :param metadata: source information. Used by some loaders to record where information came from
@@ -54,7 +60,7 @@ def load_any(self,
         data_as_dict = self.load_as_dict(source, base_dir=base_dir, metadata=metadata)

         if isinstance(data_as_dict, dict):
-            typ = data_as_dict.pop('@type', None)
+            typ = data_as_dict.pop("@type", None)
             if typ and typ != target_class.__name__:
                 logger.warning(f"Warning: input type mismatch. Expected: {target_class.__name__}, Actual: {typ}")

