Add Runnable.get_graph() to get a graph representation of a Runnable (#15040)

It can be drawn in ASCII with Runnable.get_graph().draw_ascii()
pull/15080/head
Nuno Campos authored 5 months ago, committed by GitHub
parent aad3d8bd47
commit 7d5800ee51
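For example, a minimal usage sketch (assuming this change and the optional grandalf dependency, used for ASCII drawing, are installed):

    from langchain_core.output_parsers.string import StrOutputParser

    # Build the graph (input schema -> runnable -> output schema) and render it.
    graph = StrOutputParser().get_graph()
    print(graph.draw_ascii())  # ASCII rendering requires `pip install grandalf`

The output matches the test_graph_single_runnable snapshot added below.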

@@ -12,6 +12,7 @@ from typing import (
List,
Mapping,
Optional,
Sequence,
Type,
TypeVar,
Union,
@@ -163,6 +164,9 @@ class ContextGet(RunnableSerializable):
key: Union[str, List[str]]
def __str__(self) -> str:
return f"ContextGet({_print_keys(self.key)})"
@property
def ids(self) -> List[str]:
prefix = self.prefix + "/" if self.prefix else ""
@@ -243,6 +247,9 @@ class ContextSet(RunnableSerializable):
prefix=prefix,
)
def __str__(self) -> str:
return f"ContextSet({_print_keys(list(self.keys.keys()))})"
@property
def ids(self) -> List[str]:
prefix = self.prefix + "/" if self.prefix else ""
@@ -345,3 +352,10 @@ class PrefixContext:
**kwargs: SetValue,
) -> ContextSet:
return ContextSet(_key, _value, prefix=self.prefix, **kwargs)
def _print_keys(keys: Union[str, Sequence[str]]) -> str:
if isinstance(keys, str):
return f"'{keys}'"
else:
return ", ".join(f"'{k}'" for k in keys)

@@ -74,6 +74,7 @@ if TYPE_CHECKING:
from langchain_core.runnables.fallbacks import (
RunnableWithFallbacks as RunnableWithFallbacksT,
)
from langchain_core.runnables.graph import Graph
from langchain_core.tracers.log_stream import RunLog, RunLogPatch
from langchain_core.tracers.root_listeners import Listener
@@ -352,6 +353,18 @@ class Runnable(Generic[Input, Output], ABC):
},
)
def get_graph(self, config: Optional[RunnableConfig] = None) -> Graph:
"""Return a graph representation of this runnable."""
from langchain_core.runnables.graph import Graph
graph = Graph()
input_node = graph.add_node(self.get_input_schema(config))
runnable_node = graph.add_node(self)
output_node = graph.add_node(self.get_output_schema(config))
graph.add_edge(input_node, runnable_node)
graph.add_edge(runnable_node, output_node)
return graph
def __or__(
self,
other: Union[
@@ -1447,6 +1460,26 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
return get_unique_config_specs(spec for spec, _ in all_specs)
def get_graph(self, config: Optional[RunnableConfig] = None) -> Graph:
from langchain_core.runnables.graph import Graph
graph = Graph()
for step in self.steps:
current_last_node = graph.last_node()
step_graph = step.get_graph(config)
if step is not self.first:
step_graph.trim_first_node()
if step is not self.last:
step_graph.trim_last_node()
graph.extend(step_graph)
step_first_node = step_graph.first_node()
if not step_first_node:
raise ValueError(f"Runnable {step} has no first node")
if current_last_node:
graph.add_edge(current_last_node, step_first_node)
return graph
def __repr__(self) -> str:
return "\n| ".join(
repr(s) if i == 0 else indent_lines_after_first(repr(s), "| ")
@@ -1992,6 +2025,31 @@ class RunnableParallel(RunnableSerializable[Input, Dict[str, Any]]):
spec for step in self.steps.values() for spec in step.config_specs
)
def get_graph(self, config: Optional[RunnableConfig] = None) -> Graph:
from langchain_core.runnables.graph import Graph
graph = Graph()
input_node = graph.add_node(self.get_input_schema(config))
output_node = graph.add_node(self.get_output_schema(config))
for step in self.steps.values():
step_graph = step.get_graph()
step_graph.trim_first_node()
step_graph.trim_last_node()
if not step_graph:
graph.add_edge(input_node, output_node)
else:
graph.extend(step_graph)
step_first_node = step_graph.first_node()
if not step_first_node:
raise ValueError(f"Runnable {step} has no first node")
step_last_node = step_graph.last_node()
if not step_last_node:
raise ValueError(f"Runnable {step} has no last node")
graph.add_edge(input_node, step_first_node)
graph.add_edge(step_last_node, output_node)
return graph
def __repr__(self) -> str:
map_for_repr = ",\n ".join(
f"{k}: {indent_lines_after_first(repr(v), ' ' + k + ': ')}"

@@ -0,0 +1,133 @@
from __future__ import annotations
from dataclasses import dataclass, field
from typing import Dict, List, NamedTuple, Optional, Type, Union
from uuid import uuid4
from langchain_core.pydantic_v1 import BaseModel
from langchain_core.runnables.base import Runnable
from langchain_core.runnables.graph_draw import draw
class Edge(NamedTuple):
source: str
target: str
class Node(NamedTuple):
id: str
data: Union[Type[BaseModel], Runnable]
@dataclass
class Graph:
nodes: Dict[str, Node] = field(default_factory=dict)
edges: List[Edge] = field(default_factory=list)
def __bool__(self) -> bool:
return bool(self.nodes)
def next_id(self) -> str:
return uuid4().hex
def add_node(self, data: Union[Type[BaseModel], Runnable]) -> Node:
"""Add a node to the graph and return it."""
node = Node(id=self.next_id(), data=data)
self.nodes[node.id] = node
return node
def remove_node(self, node: Node) -> None:
"""Remove a node from the graphm and all edges connected to it."""
self.nodes.pop(node.id)
self.edges = [
edge
for edge in self.edges
if edge.source != node.id and edge.target != node.id
]
def add_edge(self, source: Node, target: Node) -> Edge:
"""Add an edge to the graph and return it."""
if source.id not in self.nodes:
raise ValueError(f"Source node {source.id} not in graph")
if target.id not in self.nodes:
raise ValueError(f"Target node {target.id} not in graph")
edge = Edge(source=source.id, target=target.id)
self.edges.append(edge)
return edge
def extend(self, graph: Graph) -> None:
"""Add all nodes and edges from another graph.
Note this doesn't check for duplicates, nor does it connect the graphs."""
self.nodes.update(graph.nodes)
self.edges.extend(graph.edges)
def first_node(self) -> Optional[Node]:
"""Find the single node that is not a target of any edge.
If there is no such node, or there are multiple, return None.
When drawing the graph this node would be the origin."""
targets = {edge.target for edge in self.edges}
found: List[Node] = []
for node in self.nodes.values():
if node.id not in targets:
found.append(node)
return found[0] if len(found) == 1 else None
def last_node(self) -> Optional[Node]:
"""Find the single node that is not a source of any edge.
If there is no such node, or there are multiple, return None.
When drawing the graph this node would be the destination.
"""
sources = {edge.source for edge in self.edges}
found: List[Node] = []
for node in self.nodes.values():
if node.id not in sources:
found.append(node)
return found[0] if len(found) == 1 else None
def trim_first_node(self) -> None:
"""Remove the first node if it exists and has a single outgoing edge,
i.e. if removing it would not leave the graph without a "first" node."""
first_node = self.first_node()
if first_node:
if (
len(self.nodes) == 1
or len([edge for edge in self.edges if edge.source == first_node.id])
== 1
):
self.remove_node(first_node)
def trim_last_node(self) -> None:
"""Remove the last node if it exists and has a single incoming edge,
i.e. if removing it would not leave the graph without a "last" node."""
last_node = self.last_node()
if last_node:
if (
len(self.nodes) == 1
or len([edge for edge in self.edges if edge.target == last_node.id])
== 1
):
self.remove_node(last_node)
def draw_ascii(self) -> str:
def node_data(node: Node) -> str:
if isinstance(node.data, Runnable):
try:
data = str(node.data)
if (
data.startswith("<")
or data[0] != data[0].upper()
or len(data.splitlines()) > 1
):
data = node.data.__class__.__name__
elif len(data) > 36:
data = data[:36] + "..."
except Exception:
data = node.data.__class__.__name__
else:
data = node.data.__name__
return data
return draw(
{node.id: node_data(node) for node in self.nodes.values()},
[(edge.source, edge.target) for edge in self.edges],
)
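A minimal sketch of driving the Graph container directly (only methods defined above; the pydantic models are illustrative stand-ins for input/output schemas):

    from langchain_core.pydantic_v1 import BaseModel
    from langchain_core.runnables.graph import Graph

    class In(BaseModel):   # stand-in input schema
        x: int

    class Out(BaseModel):  # stand-in output schema
        y: int

    g = Graph()
    a = g.add_node(In)
    b = g.add_node(Out)
    g.add_edge(a, b)

    assert g.first_node() is a and g.last_node() is b
    # b has a single incoming edge, so trimming removes it and its edge:
    g.trim_last_node()
    assert len(g.nodes) == 1 and len(g.edges) == 0

The get_graph() implementations above compose step graphs the same way, using extend() plus trim_first_node()/trim_last_node() to splice adjacent steps together.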

@@ -0,0 +1,304 @@
"""Draws DAG in ASCII.
Adapted from https://github.com/iterative/dvc/blob/main/dvc/dagascii.py"""
import math
import os
from typing import Any, Mapping, Sequence, Tuple
class VertexViewer:
"""Class to define vertex box boundaries that will be accounted for during
graph building by grandalf.
Args:
name (str): name of the vertex.
"""
HEIGHT = 3 # top and bottom box edges + text
def __init__(self, name: str) -> None:
self._h = self.HEIGHT # top and bottom box edges + text
self._w = len(name) + 2 # right and left box edges + text
@property
def h(self) -> int:
"""Height of the box."""
return self._h
@property
def w(self) -> int:
"""Width of the box."""
return self._w
class AsciiCanvas:
"""Class for drawing in ASCII.
Args:
cols (int): number of columns in the canvas. Should be > 1.
lines (int): number of lines in the canvas. Should be > 1.
"""
TIMEOUT = 10
def __init__(self, cols: int, lines: int) -> None:
assert cols > 1
assert lines > 1
self.cols = cols
self.lines = lines
self.canvas = [[" "] * cols for line in range(lines)]
def draw(self) -> str:
"""Draws ASCII canvas on the screen."""
lines = map("".join, self.canvas)
return os.linesep.join(lines)
def point(self, x: int, y: int, char: str) -> None:
"""Create a point on ASCII canvas.
Args:
x (int): x coordinate. Should be >= 0 and < number of columns in
the canvas.
y (int): y coordinate. Should be >= 0 and < number of lines in the
canvas.
char (str): character to place in the specified point on the
canvas.
"""
assert len(char) == 1
assert x >= 0
assert x < self.cols
assert y >= 0
assert y < self.lines
self.canvas[y][x] = char
def line(self, x0: int, y0: int, x1: int, y1: int, char: str) -> None:
"""Create a line on ASCII canvas.
Args:
x0 (int): x coordinate where the line should start.
y0 (int): y coordinate where the line should start.
x1 (int): x coordinate where the line should end.
y1 (int): y coordinate where the line should end.
char (str): character to draw the line with.
"""
if x0 > x1:
x1, x0 = x0, x1
y1, y0 = y0, y1
dx = x1 - x0
dy = y1 - y0
if dx == 0 and dy == 0:
self.point(x0, y0, char)
elif abs(dx) >= abs(dy):
for x in range(x0, x1 + 1):
if dx == 0:
y = y0
else:
y = y0 + int(round((x - x0) * dy / float(dx)))
self.point(x, y, char)
elif y0 < y1:
for y in range(y0, y1 + 1):
if dy == 0:
x = x0
else:
x = x0 + int(round((y - y0) * dx / float(dy)))
self.point(x, y, char)
else:
for y in range(y1, y0 + 1):
if dy == 0:
x = x0
else:
x = x1 + int(round((y - y1) * dx / float(dy)))
self.point(x, y, char)
def text(self, x: int, y: int, text: str) -> None:
"""Print a text on ASCII canvas.
Args:
x (int): x coordinate where the text should start.
y (int): y coordinate where the text should start.
text (str): string that should be printed.
"""
for i, char in enumerate(text):
self.point(x + i, y, char)
def box(self, x0: int, y0: int, width: int, height: int) -> None:
"""Create a box on ASCII canvas.
Args:
x0 (int): x coordinate of the box corner.
y0 (int): y coordinate of the box corner.
width (int): box width.
height (int): box height.
"""
assert width > 1
assert height > 1
width -= 1
height -= 1
for x in range(x0, x0 + width):
self.point(x, y0, "-")
self.point(x, y0 + height, "-")
for y in range(y0, y0 + height):
self.point(x0, y, "|")
self.point(x0 + width, y, "|")
self.point(x0, y0, "+")
self.point(x0 + width, y0, "+")
self.point(x0, y0 + height, "+")
self.point(x0 + width, y0 + height, "+")
def _build_sugiyama_layout(
vertices: Mapping[str, str], edges: Sequence[Tuple[str, str]]
) -> Any:
try:
from grandalf.graphs import Edge, Graph, Vertex # type: ignore[import]
from grandalf.layouts import SugiyamaLayout # type: ignore[import]
from grandalf.routing import ( # type: ignore[import]
EdgeViewer,
route_with_lines,
)
except ImportError:
print("Install grandalf to draw graphs. `pip install grandalf`")
raise
#
# Just a reminder about naming conventions:
# +------------X
# |
# |
# |
# |
# Y
#
vertices_ = {id: Vertex(f" {data} ") for id, data in vertices.items()}
edges_ = [Edge(vertices_[s], vertices_[e]) for s, e in edges]
vertices_list = vertices_.values()
graph = Graph(vertices_list, edges_)
for vertex in vertices_list:
vertex.view = VertexViewer(vertex.data)
# NOTE: determine min box length to create the best layout
minw = min(v.view.w for v in vertices_list)
for edge in edges_:
edge.view = EdgeViewer()
sug = SugiyamaLayout(graph.C[0])
graph = graph.C[0]
roots = list(filter(lambda x: len(x.e_in()) == 0, graph.sV))
sug.init_all(roots=roots, optimize=True)
sug.yspace = VertexViewer.HEIGHT
sug.xspace = minw
sug.route_edge = route_with_lines
sug.draw()
return sug
def draw(vertices: Mapping[str, str], edges: Sequence[Tuple[str, str]]) -> str:
"""Build a DAG and draw it in ASCII.
Args:
vertices (Mapping[str, str]): mapping of vertex id to display label.
edges (Sequence[Tuple[str, str]]): sequence of (source id, target id) pairs.
Returns:
str: ASCII representation
Example:
>>> from langchain_core.runnables.graph_draw import draw
>>> vertices = {"1": "1", "2": "2", "3": "3", "4": "4"}
>>> edges = [("1", "2"), ("2", "3"), ("2", "4"), ("1", "4")]
>>> print(draw(vertices, edges))
+---+ +---+
| 3 | | 4 |
+---+ *+---+
* ** *
* ** *
* * *
+---+ *
| 2 | *
+---+ *
* *
* *
**
+---+
| 1 |
+---+
"""
# NOTE: coordinates might be negative, so we need to shift
# everything to the positive plane before we actually draw it.
Xs = [] # noqa: N806
Ys = [] # noqa: N806
sug = _build_sugiyama_layout(vertices, edges)
for vertex in sug.g.sV:
# NOTE: moving boxes w/2 to the left
Xs.append(vertex.view.xy[0] - vertex.view.w / 2.0)
Xs.append(vertex.view.xy[0] + vertex.view.w / 2.0)
Ys.append(vertex.view.xy[1])
Ys.append(vertex.view.xy[1] + vertex.view.h)
for edge in sug.g.sE:
for x, y in edge.view._pts:
Xs.append(x)
Ys.append(y)
minx = min(Xs)
miny = min(Ys)
maxx = max(Xs)
maxy = max(Ys)
canvas_cols = int(math.ceil(math.ceil(maxx) - math.floor(minx))) + 1
canvas_lines = int(round(maxy - miny))
canvas = AsciiCanvas(canvas_cols, canvas_lines)
# NOTE: first draw edges so that node boxes could overwrite them
for edge in sug.g.sE:
assert len(edge.view._pts) > 1
for index in range(1, len(edge.view._pts)):
start = edge.view._pts[index - 1]
end = edge.view._pts[index]
start_x = int(round(start[0] - minx))
start_y = int(round(start[1] - miny))
end_x = int(round(end[0] - minx))
end_y = int(round(end[1] - miny))
assert start_x >= 0
assert start_y >= 0
assert end_x >= 0
assert end_y >= 0
canvas.line(start_x, start_y, end_x, end_y, "*")
for vertex in sug.g.sV:
# NOTE: moving boxes w/2 to the left
x = vertex.view.xy[0] - vertex.view.w / 2.0
y = vertex.view.xy[1]
canvas.box(
int(round(x - minx)),
int(round(y - miny)),
vertex.view.w,
vertex.view.h,
)
canvas.text(int(round(x - minx)) + 1, int(round(y - miny)) + 1, vertex.data)
return canvas.draw()

@@ -155,14 +155,18 @@ def get_lambda_source(func: Callable) -> Optional[str]:
Returns:
str: the source code of the lambda function
"""
try:
name = func.__name__ if func.__name__ != "<lambda>" else None
except AttributeError:
name = None
try:
code = inspect.getsource(func)
tree = ast.parse(textwrap.dedent(code))
visitor = GetLambdaSource()
visitor.visit(tree)
return visitor.source if visitor.count == 1 else None
return visitor.source if visitor.count == 1 else name
except (SyntaxError, TypeError, OSError):
return None
return name
def indent_lines_after_first(text: str, prefix: str) -> str:

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "annotated-types"
@@ -555,6 +555,23 @@ files = [
[package.dependencies]
python-dateutil = ">=2.7"
[[package]]
name = "grandalf"
version = "0.8"
description = "Graph and drawing algorithms framework"
optional = false
python-versions = "*"
files = [
{file = "grandalf-0.8-py3-none-any.whl", hash = "sha256:793ca254442f4a79252ea9ff1ab998e852c1e071b863593e5383afee906b4185"},
{file = "grandalf-0.8.tar.gz", hash = "sha256:2813f7aab87f0d20f334a3162ccfbcbf085977134a17a5b516940a93a77ea974"},
]
[package.dependencies]
pyparsing = "*"
[package.extras]
full = ["numpy", "ply"]
[[package]]
name = "idna"
version = "3.4"
@@ -1764,6 +1781,20 @@ files = [
plugins = ["importlib-metadata"]
windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pyparsing"
version = "3.1.1"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
optional = false
python-versions = ">=3.6.8"
files = [
{file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"},
{file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"},
]
[package.extras]
diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pytest"
version = "7.4.3"
@@ -2734,4 +2765,4 @@ extended-testing = ["jinja2"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1,<4.0"
content-hash = "64fa7ef31713835d12d5213f04b52adf7423299d023f9558b8b4e65ce1e5262f"
content-hash = "941239bad030d610728f204c17ea49ddbb6fc72e55dadf1691f317f4795c7b1f"

@@ -41,6 +41,7 @@ optional = true
[tool.poetry.group.dev.dependencies]
jupyter = "^1.0.0"
setuptools = "^67.6.1"
grandalf = "^0.8"
[tool.poetry.group.test]
optional = true
@@ -55,6 +56,7 @@ pytest-mock = "^3.10.0"
syrupy = "^4.0.2"
pytest-watcher = "^0.3.4"
pytest-asyncio = "^0.21.1"
grandalf = "^0.8"
[tool.poetry.group.test_integration]

@@ -0,0 +1,88 @@
# serializer version: 1
# name: test_graph_sequence
'''
+-------------+
| PromptInput |
+-------------+
*
*
*
+----------------+
| PromptTemplate |
+----------------+
*
*
*
+-------------+
| FakeListLLM |
+-------------+
*
*
*
+--------------------------------+
| CommaSeparatedListOutputParser |
+--------------------------------+
*
*
*
+--------------------------------------+
| CommaSeparatedListOutputParserOutput |
+--------------------------------------+
'''
# ---
# name: test_graph_sequence_map
'''
+-------------+
| PromptInput |
+-------------+
*
*
*
+----------------+
| PromptTemplate |
+----------------+
*
*
*
+-------------+
| FakeListLLM |
+-------------+
*
*
*
+-----------------------+
| RunnableParallelInput |
+-----------------------+
**** ***
**** ****
** **
+---------------------+ +--------------------------------+
| RunnablePassthrough | | CommaSeparatedListOutputParser |
+---------------------+ +--------------------------------+
**** ***
**** ****
** **
+------------------------+
| RunnableParallelOutput |
+------------------------+
'''
# ---
# name: test_graph_single_runnable
'''
+----------------------+
| StrOutputParserInput |
+----------------------+
*
*
*
+-----------------+
| StrOutputParser |
+-----------------+
*
*
*
+-----------------------+
| StrOutputParserOutput |
+-----------------------+
'''
# ---


@@ -0,0 +1,51 @@
from syrupy import SnapshotAssertion
from langchain_core.output_parsers.list import CommaSeparatedListOutputParser
from langchain_core.output_parsers.string import StrOutputParser
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.runnables.base import Runnable
from langchain_core.runnables.passthrough import RunnablePassthrough
from tests.unit_tests.fake.llm import FakeListLLM
def test_graph_single_runnable(snapshot: SnapshotAssertion) -> None:
runnable = StrOutputParser()
graph = StrOutputParser().get_graph()
first_node = graph.first_node()
assert first_node is not None
assert first_node.data.schema() == runnable.input_schema.schema() # type: ignore[union-attr]
last_node = graph.last_node()
assert last_node is not None
assert last_node.data.schema() == runnable.output_schema.schema() # type: ignore[union-attr]
assert len(graph.nodes) == 3
assert len(graph.edges) == 2
assert graph.edges[0].source == first_node.id
assert graph.edges[1].target == last_node.id
assert graph.draw_ascii() == snapshot
def test_graph_sequence(snapshot: SnapshotAssertion) -> None:
fake_llm = FakeListLLM(responses=["a"])
prompt = PromptTemplate.from_template("Hello, {name}!")
list_parser = CommaSeparatedListOutputParser()
sequence = prompt | fake_llm | list_parser
graph = sequence.get_graph()
assert graph.draw_ascii() == snapshot
def test_graph_sequence_map(snapshot: SnapshotAssertion) -> None:
fake_llm = FakeListLLM(responses=["a"])
prompt = PromptTemplate.from_template("Hello, {name}!")
list_parser = CommaSeparatedListOutputParser()
sequence: Runnable = (
prompt
| fake_llm
| {
"original": RunnablePassthrough(input_type=str),
"as_list": list_parser,
}
)
graph = sequence.get_graph()
assert graph.draw_ascii() == snapshot

@@ -4127,13 +4127,13 @@ def test_representation_of_runnables() -> None:
"""Return 2."""
return 2
assert repr(RunnableLambda(func=f)) == "RunnableLambda(...)"
assert repr(RunnableLambda(func=f)) == "RunnableLambda(f)"
async def af(x: int) -> int:
"""Return 2."""
return 2
assert repr(RunnableLambda(func=f, afunc=af)) == "RunnableLambda(...)"
assert repr(RunnableLambda(func=f, afunc=af)) == "RunnableLambda(f)"
assert repr(
RunnableLambda(lambda x: x + 2)

poetry.lock (generated)

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "aiohttp"
@@ -987,7 +987,7 @@ files = [
{file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"},
{file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"},
{file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"},
{file = "greenlet-3.0.0-cp311-universal2-macosx_10_9_universal2.whl", hash = "sha256:c3692ecf3fe754c8c0f2c95ff19626584459eab110eaab66413b1e7425cd84e9"},
{file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"},
{file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"},
{file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"},
{file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"},
@@ -997,7 +997,6 @@ files = [
{file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"},
{file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"},
{file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"},
{file = "greenlet-3.0.0-cp312-universal2-macosx_10_9_universal2.whl", hash = "sha256:553d6fb2324e7f4f0899e5ad2c427a4579ed4873f42124beba763f16032959af"},
{file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"},
{file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"},
{file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"},
@@ -1628,7 +1627,7 @@ files = [
[[package]]
name = "langchain"
version = "0.0.349-rc.1"
version = "0.0.352"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -1640,9 +1639,9 @@ aiohttp = "^3.8.3"
async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""}
dataclasses-json = ">= 0.5.7, < 0.7"
jsonpatch = "^1.33"
langchain-community = ">=0.0.1-rc.1,<0.1"
langchain-core = ">=0.0.13rc1,<0.1"
langsmith = "~0.0.63"
langchain-community = ">=0.0.2,<0.1"
langchain-core = "^0.1"
langsmith = "~0.0.70"
numpy = "^1"
pydantic = ">=1,<3"
PyYAML = ">=5.3"
@@ -1651,7 +1650,7 @@ SQLAlchemy = ">=1.4,<3"
tenacity = "^8.1.0"
[package.extras]
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.8.3,<4.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.13.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<4)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.6.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
all = []
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"]
clarifai = ["clarifai (>=9.1.0)"]
cli = ["typer (>=0.9.0,<0.10.0)"]
@@ -1671,7 +1670,7 @@ url = "libs/langchain"
[[package]]
name = "langchain-community"
version = "0.0.1-rc.1"
version = "0.0.6"
description = "Community contributed LangChain integrations."
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -1681,7 +1680,7 @@ develop = true
[package.dependencies]
aiohttp = "^3.8.3"
dataclasses-json = ">= 0.5.7, < 0.7"
langchain-core = ">=0.0.13rc1,<0.1"
langchain-core = "^0.1"
langsmith = "~0.0.63"
numpy = "^1"
PyYAML = ">=5.3"
@@ -1691,7 +1690,7 @@ tenacity = "^8.1.0"
[package.extras]
cli = ["typer (>=0.9.0,<0.10.0)"]
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
[package.source]
type = "directory"
@@ -1699,7 +1698,7 @@ url = "libs/community"
[[package]]
name = "langchain-core"
version = "0.0.13rc1"
version = "0.1.3"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -1725,7 +1724,7 @@ url = "libs/core"
[[package]]
name = "langchain-experimental"
version = "0.0.45"
version = "0.0.47"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
@@ -1733,8 +1732,8 @@ files = []
develop = true
[package.dependencies]
langchain = ">=0.0.348,<0.1"
langchain-core = ">=0.0.12,<0.1"
langchain = ">=0.0.350,<0.1"
langchain-core = "^0.1"
[package.extras]
extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "presidio-analyzer (>=2.2.33,<3.0.0)", "presidio-anonymizer (>=2.2.33,<3.0.0)", "sentence-transformers (>=2,<3)", "vowpal-wabbit-next (==0.6.0)"]
@@ -1745,13 +1744,13 @@ url = "libs/experimental"
[[package]]
name = "langsmith"
version = "0.0.63"
version = "0.0.74"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langsmith-0.0.63-py3-none-any.whl", hash = "sha256:43a521dd10d8405ac21a0b959e3de33e2270e4abe6c73cc4036232a6990a0793"},
{file = "langsmith-0.0.63.tar.gz", hash = "sha256:ddb2dfadfad3e05151ed8ba1643d1c516024b80fbd0c6263024400ced06a3768"},
{file = "langsmith-0.0.74-py3-none-any.whl", hash = "sha256:5d573dae3c59c84aca9e4d30a79ef49906151a32bf43830ff83863a825993ac2"},
{file = "langsmith-0.0.74.tar.gz", hash = "sha256:249dae3625580fc9c1477be447ecd8dc1db76d1c8b59d0296abc027a37a0ce0b"},
]
[package.dependencies]
@@ -1840,6 +1839,16 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
{file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
{file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
{file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
{file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
{file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
{file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
{file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
{file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
{file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
{file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -2815,6 +2824,7 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -2822,8 +2832,15 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -2840,6 +2857,7 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -2847,6 +2865,7 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -3526,6 +3545,14 @@ files = [
{file = "SQLAlchemy-2.0.21-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-win32.whl", hash = "sha256:af520a730d523eab77d754f5cf44cc7dd7ad2d54907adeb3233177eeb22f271b"},
{file = "SQLAlchemy-2.0.21-cp311-cp311-win_amd64.whl", hash = "sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce"},
{file = "SQLAlchemy-2.0.21-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:56628ca27aa17b5890391ded4e385bf0480209726f198799b7e980c6bd473bd7"},
{file = "SQLAlchemy-2.0.21-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db726be58837fe5ac39859e0fa40baafe54c6d54c02aba1d47d25536170b690f"},
{file = "SQLAlchemy-2.0.21-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7421c1bfdbb7214313919472307be650bd45c4dc2fcb317d64d078993de045b"},
{file = "SQLAlchemy-2.0.21-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632784f7a6f12cfa0e84bf2a5003b07660addccf5563c132cd23b7cc1d7371a9"},
{file = "SQLAlchemy-2.0.21-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f6f7276cf26145a888f2182a98f204541b519d9ea358a65d82095d9c9e22f917"},
{file = "SQLAlchemy-2.0.21-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2a1f7ffac934bc0ea717fa1596f938483fb8c402233f9b26679b4f7b38d6ab6e"},
{file = "SQLAlchemy-2.0.21-cp312-cp312-win32.whl", hash = "sha256:bfece2f7cec502ec5f759bbc09ce711445372deeac3628f6fa1c16b7fb45b682"},
{file = "SQLAlchemy-2.0.21-cp312-cp312-win_amd64.whl", hash = "sha256:526b869a0f4f000d8d8ee3409d0becca30ae73f494cbb48801da0129601f72c6"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec"},
{file = "SQLAlchemy-2.0.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a95aa0672e3065d43c8aa80080cdd5cc40fe92dc873749e6c1cf23914c4b83af"},
