Skip to content

Commit

Permalink
correct typing for load_graph
Browse files Browse the repository at this point in the history
handling of empty/nonexistent graph file
  • Loading branch information
ytausch committed Feb 8, 2024
1 parent b376b6f commit 6f3c847
Show file tree
Hide file tree
Showing 6 changed files with 41 additions and 23 deletions.
8 changes: 4 additions & 4 deletions conda_forge_tick/auto_tick.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,6 @@
)
from conda_forge_tick.utils import (
pluck,
load_graph,
dump_graph,
CB_CONFIG,
parse_meta_yaml,
Expand All @@ -77,6 +76,7 @@
parse_munged_run_export,
fold_log_lines,
get_keys_default,
load_existing_graph,
)
from conda_forge_tick.migrators.arch import OSXArm
from conda_forge_tick.migrators.migration_yaml import (
Expand Down Expand Up @@ -988,7 +988,7 @@ def initialize_migrators(
dry_run: bool = False,
) -> Tuple[MigratorSessionContext, list, MutableSequence[Migrator]]:
temp = glob.glob("/tmp/*")
gx = load_graph()
gx = load_existing_graph()
smithy_version = eval_cmd("conda smithy --version").strip()
pinning_version = json.loads(eval_cmd("conda list conda-forge-pinning --json"))[0][
"version"
Expand Down Expand Up @@ -1415,7 +1415,7 @@ def _setup_limits():
resource.setrlimit(resource.RLIMIT_AS, (limit_int, limit_int))


def _update_nodes_with_bot_rerun(gx):
def _update_nodes_with_bot_rerun(gx: nx.DiGraph):
"""Go through all the open PRs and check if they are rerun"""

print("processing bot-rerun labels", flush=True)
Expand Down Expand Up @@ -1558,7 +1558,7 @@ def _remove_closed_pr_json():

def _update_graph_with_pr_info() -> None:
    """Refresh the dependency graph with current PR information and persist it.

    Sequence: remove stale closed-PR JSON (presumably pruning payloads of
    already-closed PRs — see ``_remove_closed_pr_json``), load the existing
    graph (raises if the graph file is missing or empty), apply bot-rerun
    label handling and new-version updates to the nodes, then write the
    updated graph back to disk via ``dump_graph``.
    """
    _remove_closed_pr_json()
    # load_existing_graph raises FileNotFoundError rather than returning None,
    # so the update steps below always receive a real graph object.
    gx = load_existing_graph()
    _update_nodes_with_bot_rerun(gx)
    _update_nodes_with_new_versions(gx)
    dump_graph(gx)
Expand Down
6 changes: 3 additions & 3 deletions conda_forge_tick/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@

from doctr.travis import run_command_hiding_token as doctr_run

from .cli_context import CliContext
from . import sensitive_env
from .utils import load_graph
from .cli_context import CliContext
from .lazy_json_backends import get_lazy_json_backends, CF_TICK_GRAPH_DATA_HASHMAPS
from .utils import load_existing_graph

BUILD_URL_KEY = "CIRCLE_BUILD_URL"

Expand Down Expand Up @@ -93,7 +93,7 @@ def deploy(ctx: CliContext):

# make sure the graph can load, if not we will error
try:
gx = load_graph()
gx = load_existing_graph()
# TODO: be more selective about which json to check
for node, attrs in gx.nodes.items():
attrs["payload"]._load()
Expand Down
14 changes: 7 additions & 7 deletions conda_forge_tick/pypi_name_mapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,24 +5,24 @@
2: Packages MUST have a test: imports section importing it
"""

import functools
import json
import math
import requests
import yaml
import pathlib
import traceback
import functools

from collections import Counter, defaultdict
from typing import Dict, List, Literal, Optional, Any, Tuple, Set, TypedDict, Union
from os.path import commonprefix
from typing import Dict, List, Literal, Optional, Any, Tuple, Set, TypedDict, Union

import requests
import yaml
from packaging.utils import (
canonicalize_name as canonicalize_pypi_name,
NormalizedName as PypiName,
)

from .utils import as_iterable, load_graph
from .lazy_json_backends import dump, loads, get_all_keys_for_hashmap, LazyJson
from .utils import as_iterable, load_existing_graph


class Mapping(TypedDict):
Expand Down Expand Up @@ -300,7 +300,7 @@ def determine_best_matches_for_pypi_import(
map_by_conda_name[conda_name] = m

graph_file = str(pathlib.Path(".") / "graph.json")
gx = load_graph(graph_file)
gx = load_existing_graph(graph_file)
# TODO: filter out archived feedstocks?

try:
Expand Down
4 changes: 2 additions & 2 deletions conda_forge_tick/update_prs.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@
from .executors import executor
from .make_graph import ghctx
from .utils import (
load_graph,
github_client,
load_existing_graph,
)

# from conda_forge_tick.profiler import profiling
Expand Down Expand Up @@ -166,7 +166,7 @@ def close_dirty_prs(

# @profiling
def main(ctx: CliContext, job: int = 1, n_jobs: int = 1) -> None:
gx = load_graph()
gx = load_existing_graph()

gx = close_labels(gx, ctx.dry_run, job=job, n_jobs=n_jobs)
gx = update_graph_pr_status(gx, ctx.dry_run, job=job, n_jobs=n_jobs)
Expand Down
5 changes: 2 additions & 3 deletions conda_forge_tick/update_upstream_versions.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,7 @@
IncrementAlphaRawURL,
NVIDIA,
)
from .utils import get_keys_default
from .utils import load_graph
from .utils import get_keys_default, load_existing_graph

T = TypeVar("T")

Expand Down Expand Up @@ -393,7 +392,7 @@ def main(
"""
logger.info("Reading graph")
# Graph enabled for inspection
gx = load_graph()
gx = load_existing_graph()

# Check if 'versions' folder exists or create a new one;
os.makedirs("versions", exist_ok=True)
Expand Down
27 changes: 23 additions & 4 deletions conda_forge_tick/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@ def _munge_dict_repr(d):
datetime=datetime,
)

DEFAULT_GRAPH_FILENAME = "graph.json"


@contextlib.contextmanager
def fold_log_lines(title):
Expand Down Expand Up @@ -431,7 +433,6 @@ def dump_graph_json(gx: nx.DiGraph, filename: str = "graph.json") -> None:
links = nld["links"]
links2 = sorted(links, key=lambda x: f'{x["source"]}{x["target"]}')
nld["links"] = links2
from conda_forge_tick.lazy_json_backends import LazyJson

lzj = LazyJson(filename)
with lzj as attrs:
Expand All @@ -447,14 +448,32 @@ def dump_graph(
dump_graph_json(gx, filename)


def load_graph(filename: str = "graph.json") -> nx.DiGraph:
from conda_forge_tick.lazy_json_backends import LazyJson
def load_existing_graph(filename: str = DEFAULT_GRAPH_FILENAME) -> nx.DiGraph:
    """
    Load the graph from a file using the lazy json backend.

    If a non-existing or empty file is encountered, a FileNotFoundError is
    raised. If you expect the graph to possibly not exist, use load_graph.

    :param filename: path of the graph JSON file to load
    :raises FileNotFoundError: if the file does not exist or is empty JSON
    :return: the graph
    """
    gx = load_graph(filename)
    if gx is None:
        # Interpolate the actual path so the error is actionable; the
        # original f-string had no placeholder and always printed a
        # fixed, uninformative message.
        raise FileNotFoundError(f"Graph file {filename} does not exist or is empty")
    return gx


def load_graph(filename: str = DEFAULT_GRAPH_FILENAME) -> Optional[nx.DiGraph]:
    """
    Load the graph from a file using the lazy json backend.

    If you expect the graph to exist, use load_existing_graph.

    :param filename: path of the graph JSON file to load
    :return: the graph, or None if the file does not exist or is empty JSON
    """
    # Deep-copy so mutations of the returned node-link data never leak back
    # into the LazyJson-backed store.
    dta = copy.deepcopy(LazyJson(filename).data)
    if dta:
        return nx.node_link_graph(dta)
    # Missing or empty file: return None as the signature and docstring
    # promise. The previous body raised FileNotFoundError here, which made
    # the trailing `return None` unreachable and broke the `if gx is None`
    # check in load_existing_graph.
    return None


# TODO: This type does not support generics yet sadly
Expand Down

0 comments on commit 6f3c847

Please sign in to comment.