aboutsummaryrefslogtreecommitdiff
path: root/venv/lib/python3.8/site-packages/dash/development
diff options
context:
space:
mode:
Diffstat (limited to 'venv/lib/python3.8/site-packages/dash/development')
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/__init__.py1
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/_all_keywords.py105
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/_collect_nodes.py78
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/_generate_prop_types.py172
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/_jl_components_generation.py549
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/_py_components_generation.py790
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/_py_prop_typing.py197
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/_r_components_generation.py1005
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/base_component.py481
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/build_process.py189
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/component_generator.py296
-rw-r--r--venv/lib/python3.8/site-packages/dash/development/update_components.py179
12 files changed, 4042 insertions, 0 deletions
diff --git a/venv/lib/python3.8/site-packages/dash/development/__init__.py b/venv/lib/python3.8/site-packages/dash/development/__init__.py
new file mode 100644
index 0000000..e2106eb
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/__init__.py
@@ -0,0 +1 @@
+from . import base_component # noqa:F401
diff --git a/venv/lib/python3.8/site-packages/dash/development/_all_keywords.py b/venv/lib/python3.8/site-packages/dash/development/_all_keywords.py
new file mode 100644
index 0000000..f84f7a4
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/_all_keywords.py
@@ -0,0 +1,105 @@
+# keyword.kwlist for both Python 2 and 3
+python_keywords = {
+ "False",
+ "None",
+ "True",
+ "and",
+ "as",
+ "assert",
+ "async",
+ "await",
+ "break",
+ "class",
+ "continue",
+ "def",
+ "del",
+ "elif",
+ "else",
+ "except",
+ "exec",
+ "finally",
+ "for",
+ "from",
+ "global",
+ "if",
+ "import",
+ "in",
+ "is",
+ "lambda",
+ "nonlocal",
+ "not",
+ "or",
+ "pass",
+ "print",
+ "raise",
+ "return",
+ "try",
+ "while",
+ "with",
+ "yield",
+}
+
+# This is a set of R reserved words that cannot be used as function
+# argument names.
+#
+# Reserved words can be obtained from R's help pages by executing the
+# statement below:
+# > ?reserved
+
+r_keywords = {
+ "if",
+ "else",
+ "repeat",
+ "while",
+ "function",
+ "for",
+ "in",
+ "next",
+ "break",
+ "TRUE",
+ "FALSE",
+ "NULL",
+ "Inf",
+ "NaN",
+ "NA",
+ "NA_integer_",
+ "NA_real_",
+ "NA_complex_",
+ "NA_character_",
+ "...",
+}
+
+# This is a set of Julia reserved words that cannot be used as function
+# argument names.
+
+julia_keywords = {
+ "baremodule",
+ "begin",
+ "break",
+ "catch",
+ "const",
+ "continue",
+ "do",
+ "else",
+ "elseif",
+ "end",
+ "export",
+ "false",
+ "finally",
+ "for",
+ "function",
+ "global",
+ "if",
+ "import",
+ "let",
+ "local",
+ "macro",
+ "module",
+ "quote",
+ "return",
+ "struct",
+ "true",
+ "try",
+ "using",
+ "while",
+}
diff --git a/venv/lib/python3.8/site-packages/dash/development/_collect_nodes.py b/venv/lib/python3.8/site-packages/dash/development/_collect_nodes.py
new file mode 100644
index 0000000..da19df0
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/_collect_nodes.py
@@ -0,0 +1,78 @@
def is_node(value):
    """Return True when *value* names a React node/element prop type."""
    return value == "node" or value == "element"
+
+
def is_shape(value):
    """Return True when *value* names a shape-like prop type."""
    return value == "shape" or value == "exact"
+
+
def collect_array(a_value, base, nodes):
    """Record node-bearing paths found inside an ``arrayOf`` prop type."""
    kind = a_value["name"]
    item_base = base + "[]"
    if is_node(kind):
        # The array itself holds nodes; record the array's own path.
        nodes.append(base)
    elif is_shape(kind):
        nodes = collect_nodes(a_value["value"], item_base, nodes)
    elif kind == "union":
        nodes = collect_union(a_value["value"], item_base, nodes)
    elif kind == "objectOf":
        nodes = collect_object(a_value["value"], item_base, nodes)
    return nodes
+
+
def collect_union(type_list, base, nodes):
    """Record node-bearing paths for every member of a ``union`` type."""
    for member in type_list:
        member_name = member["name"]
        if is_node(member_name):
            nodes.append(base)
        elif is_shape(member_name):
            nodes = collect_nodes(member["value"], base, nodes)
        elif member_name == "arrayOf":
            nodes = collect_array(member["value"], base, nodes)
        elif member_name == "objectOf":
            nodes = collect_object(member["value"], base, nodes)
    return nodes
+
+
def collect_object(o_value, base, nodes):
    """Record node-bearing paths inside an ``objectOf`` value type."""
    object_key = base + "{}"
    value_name = o_value.get("name")
    if is_node(value_name):
        nodes.append(object_key)
    elif is_shape(value_name):
        nodes = collect_nodes(o_value.get("value", {}), object_key, nodes)
    elif value_name == "union":
        nodes = collect_union(o_value.get("value"), object_key, nodes)
    elif value_name == "arrayOf":
        nodes = collect_array(o_value, object_key, nodes)
    return nodes
+
+
def collect_nodes(metadata, base="", nodes=None):
    """Walk component prop metadata and return the dotted paths of all
    props that can hold Dash nodes (components/children)."""
    nodes = nodes or []

    for prop_name, value in metadata.items():
        # Support for recursive shapes: the type may sit directly in the field.
        type_info = value.get("type", value)
        type_name = type_info.get("name")

        key = f"{base}.{prop_name}" if base else prop_name

        if is_node(type_name):
            nodes.append(key)
        elif type_name == "arrayOf":
            array_value = type_info.get("value", type_info)
            nodes = collect_array(array_value, key, nodes)
        elif is_shape(type_name):
            nodes = collect_nodes(type_info["value"], key, nodes)
        elif type_name == "union":
            nodes = collect_union(type_info["value"], key, nodes)
        elif type_name == "objectOf":
            nodes = collect_object(type_info.get("value", {}), key, nodes)

    return nodes
+
+
def filter_base_nodes(nodes):
    """Keep only top-level node paths: drop any path containing an array,
    object, or nested-attribute marker."""
    markers = ("[]", ".", "{}")
    return [node for node in nodes if not any(m in node for m in markers)]
diff --git a/venv/lib/python3.8/site-packages/dash/development/_generate_prop_types.py b/venv/lib/python3.8/site-packages/dash/development/_generate_prop_types.py
new file mode 100644
index 0000000..ce9fb7c
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/_generate_prop_types.py
@@ -0,0 +1,172 @@
+# tsx components don't have the `.propTypes` property set
+# Generate it instead with the provided metadata.json
+# for them to be able to report invalid prop
+
+import json
+import os
+import re
+
+from dash.development._py_prop_typing import get_custom_ignore
+
+
+init_check_re = re.compile("proptypes.js")
+
+missing_init_msg = """
+{warning_box}
+{title}
+{warning_box}
+
+Add the following to `{namespace}/__init__.py` to enable
+runtime prop types validation with tsx components:
+
+_js_dist.append(dict(
+ dev_package_path="proptypes.js",
+ dev_only=True,
+ namespace="{namespace}"
+))
+
+"""
+
+prop_type_file_template = """// AUTOGENERATED FILE - DO NOT EDIT
+
+var pt = window.PropTypes;
+var pk = window['{package_name}'];
+
+{components_prop_types}
+"""
+
+component_prop_types_template = "pk.{component_name}.propTypes = {prop_types};"
+
+
def generate_type(type_name):
    """Return a generator that emits the plain ``pt.<type_name>`` validator,
    ignoring any prop-info argument it is called with."""

    def _gen(*_):
        return "pt." + type_name

    return _gen
+
+
def generate_union(prop_info):
    """Emit ``pt.oneOfType`` over the union's member types."""
    joined = ",".join(generate_prop_type(t) for t in prop_info["value"])
    return f"pt.oneOfType([{joined}])"
+
+
def generate_shape(prop_info):
    """Emit ``pt.shape`` with one ``key:validator`` entry per declared key."""
    entries = ",".join(
        f"{key}:{generate_prop_type(value)}"
        for key, value in prop_info["value"].items()
    )
    return "pt.shape({" + entries + "})"
+
+
def generate_array_of(prop_info):
    """Emit ``pt.arrayOf`` wrapping the element type's validator."""
    return "pt.arrayOf(" + generate_prop_type(prop_info["value"]) + ")"
+
+
def generate_any(*_):
    """Fallback validator accepting any value (used for func/custom types)."""
    return "pt.any"
+
+
def generate_enum(prop_info):
    """Emit ``pt.oneOf`` over the enum's literal values.

    Note: the values are rendered via Python's list repr, matching the
    original generator's output byte-for-byte.
    """
    allowed = [entry["value"] for entry in prop_info["value"]]
    return f"pt.oneOf({allowed})"
+
+
def generate_object_of(prop_info):
    """Emit ``pt.objectOf`` wrapping the value type's validator."""
    inner = generate_prop_type(prop_info["value"])
    return f"pt.objectOf({inner})"
+
+
def generate_tuple(*_):
    """PropTypes has no tuple validator; fall back to a plain array."""
    return "pt.array"
+
+
def generate_literal(prop_info):
    """Emit ``pt.oneOf`` with a single JSON-encoded literal value."""
    encoded = json.dumps(prop_info["value"])
    return "pt.oneOf([" + encoded + "])"
+
+
# Dispatch table: react-docgen type name -> generator that renders the
# matching window.PropTypes validator expression for that prop.
prop_types = {
    "array": generate_type("array"),
    "arrayOf": generate_array_of,
    "object": generate_type("object"),
    "shape": generate_shape,
    "exact": generate_shape,
    "string": generate_type("string"),
    "bool": generate_type("bool"),
    "number": generate_type("number"),
    "node": generate_type("node"),
    # Functions can't be validated at runtime; accept anything.
    "func": generate_any,
    "element": generate_type("element"),
    "union": generate_union,
    "any": generate_any,
    "custom": generate_any,
    "enum": generate_enum,
    "objectOf": generate_object_of,
    "tuple": generate_tuple,
    "literal": generate_literal,
}
+
+
def generate_prop_type(prop_info):
    """Dispatch on the docgen type name to build a validator expression."""
    handler = prop_types[prop_info["name"]]
    return handler(prop_info)
+
+
def check_init(namespace):
    """Warn (via stdout) when `<namespace>/__init__.py` exists but does not
    register the generated proptypes.js as a dev resource."""
    init_path = os.path.join(namespace, "__init__.py")
    if not os.path.exists(init_path):
        return
    with open(init_path, encoding="utf-8", mode="r") as init_file:
        contents = init_file.read()
    if init_check_re.search(contents):
        return
    title = f"! Missing proptypes.js in `{namespace}/__init__.py` !"
    print(
        missing_init_msg.format(
            namespace=namespace,
            warning_box="!" * len(title),
            title=title,
        )
    )
+
+
def generate_prop_types(
    metadata,
    package_name,
    custom_typing_module,
):
    """Generate `<package_name>/proptypes.js`, patching runtime propTypes
    onto tsx components (tsx components don't ship a ``.propTypes``).

    Parameters
    ----------
    metadata: dict
        Parsed metadata.json mapping component paths to docgen data.
    package_name: str
        Name of the generated package; also the output directory.
    custom_typing_module: str
        Module consulted for props whose typing should be ignored.
    """
    patched = []

    custom_ignore = get_custom_ignore(custom_typing_module)

    for component_path, data in metadata.items():
        filename = component_path.split("/")[-1]
        # The path separators were already stripped above; only the file
        # extension needs to be isolated here.
        extension = filename.split(".")[-1]
        if extension != "tsx":
            continue

        component_name = filename.split(".")[0]

        props = []
        for prop_name, prop_data in data.get("props", {}).items():
            if prop_name in custom_ignore:
                # Custom-typed props can't be validated generically.
                prop_type = "pt.any"
            else:
                prop_type = generate_prop_type(prop_data["type"])
            props.append(f"{prop_name}:{prop_type}")

        patched.append(
            component_prop_types_template.format(
                package_name=package_name,
                component_name=component_name,
                prop_types="{" + ",\n ".join(props) + "}",
            )
        )

    if patched:
        with open(
            os.path.join(package_name, "proptypes.js"), encoding="utf-8", mode="w"
        ) as f:
            f.write(
                prop_type_file_template.format(
                    package_name=package_name, components_prop_types="\n".join(patched)
                )
            )

        # Remind the developer to load proptypes.js from __init__.py.
        check_init(package_name)
diff --git a/venv/lib/python3.8/site-packages/dash/development/_jl_components_generation.py b/venv/lib/python3.8/site-packages/dash/development/_jl_components_generation.py
new file mode 100644
index 0000000..60d0998
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/_jl_components_generation.py
@@ -0,0 +1,549 @@
+# pylint: disable=consider-using-f-string
+# type: ignore
+import copy
+import os
+import shutil
+import warnings
+import sys
+import importlib
+import uuid
+import hashlib
+
+from ._all_keywords import julia_keywords
+from ._py_components_generation import reorder_props
+
+# uuid of DashBase Julia package.
+jl_dash_base_uuid = "03207cf0-e2b3-4b91-9ca8-690cf0fb507e"
+
+# uuid of Dash Julia package. Used as base for component package uuid
+jl_dash_uuid = "1b08a953-4be3-4667-9a23-3db579824955"
+
+# Declaring longer string templates as globals to improve
+# readability, make method logic clearer to anyone inspecting
+# code below
+jl_component_string = '''
+export {funcname}
+
+"""
+ {funcname}(;kwargs...){children_signatures}
+
+{docstring}
+"""
+function {funcname}(; kwargs...)
+ available_props = Symbol[{component_props}]
+ wild_props = Symbol[{wildcard_symbols}]
+ return Component("{funcname}", "{element_name}", "{module_name}", available_props, wild_props; kwargs...)
+end
+{children_definitions}
+''' # noqa:E501
+
+jl_children_signatures = """
+ {funcname}(children::Any;kwargs...)
+ {funcname}(children_maker::Function;kwargs...)
+"""
+
+jl_children_definitions = """
+{funcname}(children::Any; kwargs...) = {funcname}(;kwargs..., children = children)
+{funcname}(children_maker::Function; kwargs...) = {funcname}(children_maker(); kwargs...)
+"""
+
+jl_package_file_string = """
+module {package_name}
+using {base_package}
+
+const resources_path = realpath(joinpath( @__DIR__, "..", "deps"))
+const version = "{version}"
+
+{component_includes}
+
+function __init__()
+ DashBase.register_package(
+ DashBase.ResourcePkg(
+ "{project_shortname}",
+ resources_path,
+ version = version,
+ [
+ {resources_dist}
+ ]
+ )
+
+ )
+end
+end
+"""
+
+jl_projecttoml_string = """
+name = "{package_name}"
+uuid = "{package_uuid}"
+{authors}version = "{version}"
+
+[deps]
+{base_package} = "{dash_uuid}"
+
+[compat]
+julia = "1.2"
+{base_package} = "{base_version}"
+"""
+
+jl_base_version = {
+ "Dash": "0.1.3, 1.0",
+ "DashBase": "0.1",
+}
+
+jl_component_include_string = 'include("jl/{name}.jl")'
+
+jl_resource_tuple_string = """DashBase.Resource(
+ relative_package_path = {relative_package_path},
+ external_url = {external_url},
+ dynamic = {dynamic},
+ async = {async_string},
+ type = :{type}
+)"""
+
+core_packages = ["dash_html_components", "dash_core_components", "dash_table"]
+
+
def jl_package_name(namestring):
    """Convert a snake_case project name to a Julia CamelCase package name."""
    return "".join(part.capitalize() for part in namestring.split("_"))
+
+
def stringify_wildcards(wclist, no_symbol=False):
    """Render wildcard prefixes either as a bare `a-|b-` pattern or as a
    comma-separated list of Julia ``Symbol("a-")`` literals."""
    if no_symbol:
        return "|".join("{}-".format(item) for item in wclist)
    return ", ".join('Symbol("{}-")'.format(item) for item in wclist)
+
+
def get_wildcards_jl(props):
    """Collect wildcard prop prefixes: keys ending in ``-*``, suffix removed."""
    wildcards = []
    for key in props:
        if key.endswith("-*"):
            wildcards.append(key.replace("-*", ""))
    return wildcards
+
+
def get_jl_prop_types(type_object):
    """Mapping from the PropTypes js type object to the Julia type.

    Returns a dict of {js type name: thunk}; each thunk renders the
    human-readable Julia type description used in generated docstrings.
    """

    def shape_or_exact():
        # Shapes are described by listing every field, each rendered with
        # its own (recursively generated) prop docstring, indented one level.
        return "lists containing elements {}.\n{}".format(
            ", ".join("'{}'".format(t) for t in type_object["value"]),
            "Those elements have the following types:\n{}".format(
                "\n".join(
                    create_prop_docstring_jl(
                        prop_name=prop_name,
                        type_object=prop,
                        required=prop["required"],
                        description=prop.get("description", ""),
                        indent_num=1,
                    )
                    for prop_name, prop in type_object["value"].items()
                )
            ),
        )

    return dict(
        array=lambda: "Array",
        bool=lambda: "Bool",
        number=lambda: "Real",
        string=lambda: "String",
        object=lambda: "Dict",
        any=lambda: "Bool | Real | String | Dict | Array",
        element=lambda: "dash component",
        node=lambda: "a list of or a singular dash component, string or number",
        # React's PropTypes.oneOf
        enum=lambda: "a value equal to: {}".format(
            ", ".join("{}".format(str(t["value"])) for t in type_object["value"])
        ),
        # React's PropTypes.oneOfType; members with no Julia mapping are dropped
        union=lambda: "{}".format(
            " | ".join(
                "{}".format(get_jl_type(subType))
                for subType in type_object["value"]
                if get_jl_type(subType) != ""
            )
        ),
        # React's PropTypes.arrayOf
        arrayOf=lambda: (
            "Array"
            + (
                " of {}s".format(get_jl_type(type_object["value"]))
                if get_jl_type(type_object["value"]) != ""
                else ""
            )
        ),
        # React's PropTypes.objectOf
        objectOf=lambda: "Dict with Strings as keys and values of type {}".format(
            get_jl_type(type_object["value"])
        ),
        # React's PropTypes.shape
        shape=shape_or_exact,
        # React's PropTypes.exact
        exact=shape_or_exact,
    )
+
+
def filter_props(props):
    """Filter props from the Component arguments to exclude:
    - Those without a "type" or a "flowType" field
    - Those with arg.type.name in {'func', 'symbol', 'instanceOf'}
    Parameters
    ----------
    props: dict
        Dictionary with {propName: propMetadata} structure
    Returns
    -------
    dict
        Filtered dictionary with {propName: propMetadata} structure
    Raises
    ------
    ValueError
        If a prop's metadata has an unexpected layout. (Defensive only:
        props lacking both "type" and "flowType" are removed by the first
        check, so this branch should be unreachable.)
    """
    # Deep-copy so the caller's metadata is never mutated.
    filtered_props = copy.deepcopy(props)

    for arg_name, arg in list(filtered_props.items()):
        if "type" not in arg and "flowType" not in arg:
            filtered_props.pop(arg_name)
            continue

        # Filter out functions and instances --
        if "type" in arg:  # These come from PropTypes
            arg_type = arg["type"]["name"]
            if arg_type in {"func", "symbol", "instanceOf"}:
                filtered_props.pop(arg_name)
        elif "flowType" in arg:  # These come from Flow & handled differently
            arg_type_name = arg["flowType"]["name"]
            if arg_type_name == "signature":
                # This does the same as the PropTypes filter above, but "func"
                # is under "type" if "name" is "signature" vs just in "name"
                if "type" not in arg["flowType"] or arg["flowType"]["type"] != "object":
                    filtered_props.pop(arg_name)
        else:
            # Was a bare `raise ValueError`; give the error a useful message.
            raise ValueError(
                "Unexpected prop metadata for {!r}: expected a 'type' or "
                "'flowType' entry".format(arg_name)
            )

    return filtered_props
+
+
def get_jl_type(type_object):
    """
    Convert JS types to Julia types for the component definition
    Parameters
    ----------
    type_object: dict
        react-docgen-generated prop type dictionary
    Returns
    -------
    str
        Julia type string ("" when the JS type has no Julia mapping)
    """
    converters = get_jl_prop_types(type_object=type_object)
    converter = converters.get(type_object["name"])
    if converter is None:
        return ""
    return converter()
+
+
def print_jl_type(typedata):
    """Capitalized Julia type string followed by '. ', or '' when unmapped."""
    label = get_jl_type(typedata).capitalize()
    return label + ". " if label else label
+
+
def create_docstring_jl(component_name, props, description):
    """Create the Dash component docstring.
    Parameters
    ----------
    component_name: str
        Component name
    props: dict
        Dictionary with {propName: propMetadata} structure
    description: str
        Component description
    Returns
    -------
    str
        Dash component docstring
    """
    # Ensure props are ordered with children first
    props = reorder_props(props=props)

    # "An Input component." vs "A Button component." — pick the article
    # from the component name's first letter.
    return "A{n} {name} component.\n{description}\nKeyword arguments:\n{args}".format(
        n="n" if component_name[0].lower() in "aeiou" else "",
        name=component_name,
        description=description,
        args="\n".join(
            create_prop_docstring_jl(
                prop_name=p,
                type_object=prop["type"] if "type" in prop else prop["flowType"],
                required=prop["required"],
                description=prop["description"],
                indent_num=0,
            )
            for p, prop in filter_props(props).items()
        ),
    )
+
+
def create_prop_docstring_jl(
    prop_name,
    type_object,
    required,
    description,
    indent_num,
):
    """
    Create the Dash component prop docstring
    Parameters
    ----------
    prop_name: str
        Name of the Dash component prop
    type_object: dict
        react-docgen-generated prop type dictionary
    required: bool
        Component is required?
    description: str
        Dash component description
    indent_num: int
        Number of indents to use for the context block
        (creates 2 spaces for every indent)
    Returns
    -------
    str
        Dash component prop docstring
    """
    jl_type_name = get_jl_type(type_object=type_object)

    indent_spacing = "  " * indent_num
    # Multi-line type descriptions (shapes/exacts) use the long form with
    # the type rendered after the description.
    if "\n" in jl_type_name:
        return (
            "{indent_spacing}- `{name}` ({is_required}): {description}. "
            "{name} has the following type: {type}".format(
                indent_spacing=indent_spacing,
                name=prop_name,
                type=jl_type_name,
                description=description,
                is_required="required" if required else "optional",
            )
        )
    return "{indent_spacing}- `{name}` ({type}{is_required}){description}".format(
        indent_spacing=indent_spacing,
        name=prop_name,
        type="{}; ".format(jl_type_name) if jl_type_name else "",
        description=(": {}".format(description) if description != "" else ""),
        is_required="required" if required else "optional",
    )
+
+
# this logic will permit passing blank Julia prefixes to
# dash-generate-components, while also enforcing
# lower case names for the resulting functions; if a prefix
# is supplied, leave it as-is
def format_fn_name(prefix, name):
    """Lower-case *name*, prepending ``prefix_`` when a prefix is given."""
    lowered = name.lower()
    return "{}_{}".format(prefix, lowered) if prefix else lowered
+
+
def generate_metadata_strings(resources, metatype):
    """Render one ``DashBase.Resource(...)`` literal per resource dict."""

    def quoted_or_nothing(value):
        # Julia `nothing` for absent fields, quoted string otherwise.
        return '"{}"'.format(value) if value else "nothing"

    rendered = []
    for resource in resources:
        if "async" in resource:
            async_string = ":{}".format(str(resource.get("async")).lower())
        else:
            async_string = "nothing"
        rendered.append(
            jl_resource_tuple_string.format(
                relative_package_path=quoted_or_nothing(
                    resource.get("relative_package_path", "")
                ),
                external_url=quoted_or_nothing(resource.get("external_url", "")),
                dynamic=str(resource.get("dynamic", "nothing")).lower(),
                type=metatype,
                async_string=async_string,
            )
        )
    return rendered
+
+
def is_core_package(project_shortname):
    """True for first-party component suites that build on DashBase."""
    return any(project_shortname == pkg for pkg in core_packages)
+
+
def base_package_name(project_shortname):
    """Julia base dependency: DashBase for core packages, Dash otherwise."""
    if is_core_package(project_shortname):
        return "DashBase"
    return "Dash"
+
+
def base_package_uid(project_shortname):
    """UUID of the Julia base dependency matching base_package_name()."""
    if is_core_package(project_shortname):
        return jl_dash_base_uuid
    return jl_dash_uuid
+
+
def generate_package_file(project_shortname, components, pkg_data, prefix):
    """Write src/<PackageName>.jl, the top-level Julia module file.

    Imports the component package from the current working directory to
    read its JS/CSS resource lists, then renders the module template with
    one `include` per generated component file.
    """
    package_name = jl_package_name(project_shortname)

    # Make the freshly generated Python package importable so its
    # _js_dist/_css_dist resource metadata can be read.
    sys.path.insert(0, os.getcwd())
    mod = importlib.import_module(project_shortname)
    js_dist = getattr(mod, "_js_dist", [])
    css_dist = getattr(mod, "_css_dist", [])
    project_ver = pkg_data.get("version")

    resources_dist = ",\n".join(
        generate_metadata_strings(js_dist, "js")
        + generate_metadata_strings(css_dist, "css")
    )

    package_string = jl_package_file_string.format(
        package_name=package_name,
        component_includes="\n".join(
            [
                jl_component_include_string.format(
                    name=format_fn_name(prefix, comp_name)
                )
                for comp_name in components
            ]
        ),
        resources_dist=resources_dist,
        version=project_ver,
        project_shortname=project_shortname,
        base_package=base_package_name(project_shortname),
    )
    file_path = os.path.join("src", package_name + ".jl")
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(package_string)
    print("Generated {}".format(file_path))
+
+
def generate_toml_file(project_shortname, pkg_data):
    """Write Project.toml for the generated Julia package.

    The package UUID is derived deterministically from the Dash Julia
    UUID: its last 12 hex digits are replaced with the tail of the
    SHA-256 of the package name, so regeneration is stable.
    """
    package_author = pkg_data.get("author", "")
    project_ver = pkg_data.get("version")
    package_name = jl_package_name(project_shortname)
    u = uuid.UUID(jl_dash_uuid)

    package_uuid = uuid.UUID(
        hex=u.hex[:-12] + hashlib.sha256(package_name.encode("utf-8")).hexdigest()[-12:]
    )

    # Authors line is omitted entirely when no author is declared.
    authors_string = (
        'authors = ["{}"]\n'.format(package_author) if package_author else ""
    )

    base_package = base_package_name(project_shortname)

    toml_string = jl_projecttoml_string.format(
        package_name=package_name,
        package_uuid=package_uuid,
        version=project_ver,
        authors=authors_string,
        base_package=base_package,
        base_version=jl_base_version[base_package],
        dash_uuid=base_package_uid(project_shortname),
    )
    file_path = "Project.toml"
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(toml_string)
    print("Generated {}".format(file_path))
+
+
def generate_class_string(name, props, description, project_shortname, prefix):
    """Build the Julia source for one component function.

    Parameters
    ----------
    name: str
        Component (element) name as exported from JS.
    props: dict
        Dictionary with {propName: propMetadata} structure.
    description: str
        Component description used in the generated docstring.
    project_shortname: str
        Python package/module name of the component suite.
    prefix: str
        Optional function-name prefix (may be blank).

    Returns
    -------
    str
        Rendered Julia function definition, including the docstring and —
        when the component accepts children — the convenience methods.
    """
    # Ensure props are ordered with children first
    filtered_props = reorder_props(filter_props(props))

    prop_keys = list(filtered_props.keys())

    docstring = (
        create_docstring_jl(
            component_name=name, props=filtered_props, description=description
        )
        .replace("\r\n", "\n")
        # '$' would interpolate inside Julia triple-quoted strings.
        .replace("$", "\\$")
    )

    wclist = get_wildcards_jl(props)

    # Filter props to remove those we don't want to expose
    for item in prop_keys[:]:
        if item.endswith("-*") or item == "setProps":
            prop_keys.remove(item)
        elif item in julia_keywords:
            prop_keys.remove(item)
            warnings.warn(
                (
                    'WARNING: prop "{}" in component "{}" is a Julia keyword'
                    " - REMOVED FROM THE JULIA COMPONENT"
                ).format(item, name)
            )

    default_paramtext = ", ".join(":{}".format(p) for p in prop_keys)

    has_children = "children" in prop_keys
    funcname = format_fn_name(prefix, name)
    children_signatures = (
        jl_children_signatures.format(funcname=funcname) if has_children else ""
    )
    children_definitions = (
        jl_children_definitions.format(funcname=funcname) if has_children else ""
    )
    # Note: the previously passed `wildcard_names` kwarg was dropped —
    # jl_component_string has no such placeholder, so it was never used.
    return jl_component_string.format(
        funcname=funcname,
        docstring=docstring,
        component_props=default_paramtext,
        wildcard_symbols=stringify_wildcards(wclist, no_symbol=False),
        element_name=name,
        module_name=project_shortname,
        children_signatures=children_signatures,
        children_definitions=children_definitions,
    )
+
+
def generate_struct_file(name, props, description, project_shortname, prefix):
    """Render one component function and write it to src/jl/<funcname>.jl."""
    # Ensure props are ordered with children first
    props = reorder_props(props=props)
    import_string = "# AUTO GENERATED FILE - DO NOT EDIT\n"
    class_string = generate_class_string(
        name, props, description, project_shortname, prefix
    )

    file_name = format_fn_name(prefix, name) + ".jl"

    # put component files in src/jl subdir,
    # this also creates the Julia source directory for the package
    # if it is missing
    if not os.path.exists("src/jl"):
        os.makedirs("src/jl")

    file_path = os.path.join("src", "jl", file_name)
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(import_string)
        f.write(class_string)

    print("Generated {}".format(file_name))
+
+
# pylint: disable=unused-argument
def generate_module(
    project_shortname, components, metadata, pkg_data, prefix, **kwargs
):
    """Generate the full Julia package: deps/ assets, module file, Project.toml.

    Copies all non-Python files from the (Python) components directory into
    a fresh deps/ tree, then writes the package module file and the toml.
    """
    # copy over all JS dependencies from the (Python) components dir
    # the inst/lib directory for the package won't exist on first call
    # create this directory if it is missing
    if os.path.exists("deps"):
        shutil.rmtree("deps")

    os.makedirs("deps")

    for rel_dirname, _, filenames in os.walk(project_shortname):
        for filename in filenames:
            extension = os.path.splitext(filename)[1]

            # Python sources and metadata are not runtime assets.
            if extension in [".py", ".pyc", ".json"]:
                continue

            target_dirname = os.path.join(
                "deps/", os.path.relpath(rel_dirname, project_shortname)
            )

            if not os.path.exists(target_dirname):
                os.makedirs(target_dirname)

            shutil.copy(os.path.join(rel_dirname, filename), target_dirname)

    generate_package_file(project_shortname, components, pkg_data, prefix)
    generate_toml_file(project_shortname, pkg_data)
diff --git a/venv/lib/python3.8/site-packages/dash/development/_py_components_generation.py b/venv/lib/python3.8/site-packages/dash/development/_py_components_generation.py
new file mode 100644
index 0000000..7b23066
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/_py_components_generation.py
@@ -0,0 +1,790 @@
+from collections import OrderedDict
+import copy
+import numbers
+import os
+import typing
+from textwrap import fill, dedent
+
+from typing_extensions import TypedDict, NotRequired, Literal
+from dash.development.base_component import _explicitize_args
+from dash.exceptions import NonExistentEventException
+from ._all_keywords import python_keywords
+from ._collect_nodes import collect_nodes, filter_base_nodes
+from ._py_prop_typing import (
+ get_custom_ignore,
+ get_custom_props,
+ get_prop_typing,
+ shapes,
+ get_custom_imports,
+)
+from .base_component import Component, ComponentType
+
# Header prepended to every generated component module: shared imports plus
# the ComponentType/NumberType aliases that generated annotations reference.
# {custom_imports} is filled with any user-registered extra import lines.
import_string = """# AUTO GENERATED FILE - DO NOT EDIT

import typing  # noqa: F401
from typing_extensions import TypedDict, NotRequired, Literal # noqa: F401
from dash.development.base_component import Component, _explicitize_args
{custom_imports}
ComponentType = typing.Union[
    str,
    int,
    float,
    Component,
    None,
    typing.Sequence[typing.Union[str, int, float, Component, None]],
]

NumberType = typing.Union[
    typing.SupportsFloat, typing.SupportsInt, typing.SupportsComplex
]


"""
+
+
# pylint: disable=unused-argument,too-many-locals,too-many-branches
def generate_class_string(
    typename,
    props,
    description,
    namespace,
    prop_reorder_exceptions=None,
    max_props=None,
    custom_typing_module=None,
):
    """Dynamically generate class strings to have nicely formatted docstrings,
    keyword arguments, and repr.
    Inspired by http://jameso.be/2013/08/06/namedtuple.html
    Parameters
    ----------
    typename: str
        Name of the generated component class.
    props: dict
        {propName: propMetadata} as produced by react-docgen.
    description: str
        Component description placed into the class docstring.
    namespace: str
        Package namespace stored on the generated class.
    prop_reorder_exceptions: list | None
        Component names (or "ALL") whose props keep metadata order instead
        of being reordered (children, id, then sorted).
    max_props: int | None
        Cap on the number of named constructor arguments; overflow props
        remain usable as **kwargs.
    custom_typing_module: str | None
        Module supplying custom typing overrides/ignores.
    Returns
    -------
    string
    """
    # TODO _prop_names, _type, _namespace, and available_properties
    # can be modified by a Dash JS developer via setattr
    # TODO - Tab out the repr for the repr of these components to make it
    # look more like a hierarchical tree
    # TODO - Include "description" "defaultValue" in the repr and docstring
    #
    # TODO - Handle "required"
    #
    # TODO - How to handle user-given `null` values? I want to include
    # an expanded docstring like Dropdown(value=None, id=None)
    # but by templating in those None values, I have no way of knowing
    # whether a property is None because the user explicitly wanted
    # it to be `null` or whether that was just the default value.
    # The solution might be to deal with default values better although
    # not all component authors will supply those.
    c = '''class {typename}(Component):
    """{docstring}"""
    _children_props = {children_props}
    _base_nodes = {base_nodes}
    _namespace = '{namespace}'
    _type = '{typename}'
{shapes}

    def __init__(
        self,
        {default_argtext}
    ):
        self._prop_names = {list_of_valid_keys}
        self._valid_wildcard_attributes =\
            {list_of_valid_wildcard_attr_prefixes}
        self.available_properties = {list_of_valid_keys}
        self.available_wildcard_properties =\
            {list_of_valid_wildcard_attr_prefixes}
        _explicit_args = kwargs.pop('_explicit_args')
        _locals = locals()
        _locals.update(kwargs)  # For wildcard attrs and excess named props
        args = {args}
        {required_validation}
        super({typename}, self).__init__({argtext})

setattr({typename}, "__init__", _explicitize_args({typename}.__init__))
'''

    # Props keep metadata order only for excepted components; otherwise
    # they are reordered children-first.
    filtered_props = (
        filter_props(props)
        if (prop_reorder_exceptions is not None and typename in prop_reorder_exceptions)
        or (prop_reorder_exceptions is not None and "ALL" in prop_reorder_exceptions)
        else reorder_props(filter_props(props))
    )
    wildcard_prefixes = repr(parse_wildcards(props))
    list_of_valid_keys = repr(list(map(str, filtered_props.keys())))
    custom_ignore = get_custom_ignore(custom_typing_module)
    docstring = create_docstring(
        component_name=typename,
        props=filtered_props,
        description=description,
        prop_reorder_exceptions=prop_reorder_exceptions,
        ignored_props=custom_ignore,
    ).replace("\r\n", "\n")
    required_args = required_props(filtered_props)
    is_children_required = "children" in required_args
    required_args = [arg for arg in required_args if arg != "children"]

    prohibit_events(props)

    # pylint: disable=unused-variable
    prop_keys = list(props.keys())
    # NOTE: the second condition is a substring test against the repr of the
    # valid-keys list, not a membership test on the list itself.
    if "children" in props and "children" in list_of_valid_keys:
        prop_keys.remove("children")
        # TODO For dash 3.0, remove the Optional and = None for proper typing.
        # Also add the other required props after children.
        default_argtext = f"children: typing.Optional[{get_prop_typing('node', '', '', {})}] = None,\n        "
        args = "{k: _locals[k] for k in _explicit_args if k != 'children'}"
        argtext = "children=children, **args"
    else:
        default_argtext = ""
        args = "{k: _locals[k] for k in _explicit_args}"
        argtext = "**args"

    if len(required_args) == 0:
        required_validation = ""
    else:
        required_validation = f"""
        for k in {required_args}:
            if k not in args:
                raise TypeError(
                    'Required argument `' + k + '` was not specified.')
        """

    if is_children_required:
        required_validation += """
        if 'children' not in _explicit_args:
            raise TypeError('Required argument children was not specified.')
        """

    default_arglist = []

    for prop_key in prop_keys:
        prop = props[prop_key]
        # Wildcards, Python keywords, and setProps cannot be named arguments.
        if (
            prop_key.endswith("-*")
            or prop_key in python_keywords
            or prop_key == "setProps"
        ):
            continue

        type_info = prop.get("type")

        if not type_info:
            print(f"Invalid prop type for typing: {prop_key}")
            default_arglist.append(f"{prop_key} = None")
            continue

        type_name = type_info.get("name")

        custom_props = get_custom_props(custom_typing_module)
        typed = get_prop_typing(
            type_name,
            typename,
            prop_key,
            type_info,
            custom_props=custom_props,
            custom_ignore=custom_ignore,
        )

        arg_value = f"{prop_key}: typing.Optional[{typed}] = None"

        default_arglist.append(arg_value)

    # Truncate the signature when the component has too many props; the
    # dropped props still work as **kwargs.
    if max_props:
        final_max_props = max_props - (1 if "children" in props else 0)
        if len(default_arglist) > final_max_props:
            default_arglist = default_arglist[:final_max_props]
            docstring += (
                "\n\n"
                "Note: due to the large number of props for this component,\n"
                "not all of them appear in the constructor signature, but\n"
                "they may still be used as keyword arguments."
            )

    default_argtext += ",\n        ".join(default_arglist + ["**kwargs"])
    nodes = collect_nodes({k: v for k, v in props.items() if k != "children"})

    return dedent(
        c.format(
            typename=typename,
            namespace=namespace,
            filtered_props=filtered_props,
            list_of_valid_wildcard_attr_prefixes=wildcard_prefixes,
            list_of_valid_keys=list_of_valid_keys,
            docstring=docstring,
            default_argtext=default_argtext,
            args=args,
            argtext=argtext,
            required_validation=required_validation,
            children_props=nodes,
            base_nodes=filter_base_nodes(nodes) + ["children"],
            shapes="\n".join(shapes.get(typename, {}).values()),
        )
    )
+
+
def generate_class_file(
    typename,
    props,
    description,
    namespace,
    prop_reorder_exceptions=None,
    max_props=None,
    custom_typing_module="dash_prop_typing",
):
    """Generate a Python class file (.py) given a class string.

    Writes ``<namespace>/<typename>.py`` containing the shared import
    header (plus any custom import lines registered for this component or
    for "*") followed by the generated component class.
    """
    class_source = generate_class_string(
        typename,
        props,
        description,
        namespace,
        prop_reorder_exceptions,
        max_props,
        custom_typing_module,
    )

    # A component-specific custom-import entry wins over the wildcard one.
    registered = get_custom_imports(custom_typing_module)
    extra_imports = registered.get(typename) or registered.get("*")

    header = import_string.format(
        custom_imports="\n" + "\n".join(extra_imports) + "\n\n"
        if extra_imports
        else ""
    )

    file_name = f"{typename:s}.py"

    with open(os.path.join(namespace, file_name), "w", encoding="utf-8") as out:
        out.write(header)
        out.write(class_source)

    print(f"Generated {file_name}")
+
+
def generate_imports(project_shortname, components):
    """Write the package's _imports_.py re-exporting every component class."""
    import_lines = [f"from .{x} import {x}" for x in components]
    all_entries = [f'    "{x}"' for x in components]
    contents = (
        "\n".join(import_lines)
        + "\n\n__all__ = [\n"
        + ",\n".join(all_entries)
        + "\n]"
    )

    target = os.path.join(project_shortname, "_imports_.py")
    with open(target, "w", encoding="utf-8") as f:
        f.write(contents)
+
+
def generate_classes_files(project_shortname, metadata, *component_generators):
    """Run every generator once per component described in *metadata*.

    The component name is derived from the metadata key (the source file
    path): basename without its extension. Returns the names in metadata
    order.
    """
    names = []
    for path, data in metadata.items():
        name = path.split("/")[-1].split(".")[0]
        names.append(name)
        for make_component in component_generators:
            make_component(
                name,
                data["props"],
                data["description"],
                project_shortname,
            )
    return names
+
+
def generate_class(
    typename, props, description, namespace, prop_reorder_exceptions=None
):
    """Generate a Python class object given a class string.

    Builds the component class source via ``generate_class_string`` and
    executes it, returning the freshly created class.

    Parameters
    ----------
    typename
    props
    description
    namespace
    Returns
    -------
    """
    string = generate_class_string(
        typename, props, description, namespace, prop_reorder_exceptions
    )
    # The generated source references these names (see import_string); they
    # must be provided explicitly because exec() runs in its own namespace.
    scope = {
        "Component": Component,
        "ComponentType": ComponentType,
        "_explicitize_args": _explicitize_args,
        "typing": typing,
        "numbers": numbers,
        "TypedDict": TypedDict,
        "NotRequired": NotRequired,
        "Literal": Literal,
        "NumberType": typing.Union[
            typing.SupportsFloat, typing.SupportsComplex, typing.SupportsInt
        ],
    }
    # pylint: disable=exec-used
    exec(string, scope)
    result = scope[typename]
    return result
+
+
def required_props(props):
    """Return the names of all props flagged as required.

    Parameters
    ----------
    props: dict
        Dictionary with {propName: propMetadata} structure

    Returns
    -------
    list
        List of prop names (str) that are required for the Component
    """
    names = []
    for prop_name, meta in props.items():
        if meta["required"]:
            names.append(prop_name)
    return names
+
+
def create_docstring(
    component_name,
    props,
    description,
    prop_reorder_exceptions=None,
    ignored_props=tuple(),
):
    """Create the Dash component docstring.
    Parameters
    ----------
    component_name: str
        Component name
    props: dict
        Dictionary with {propName: propMetadata} structure
    description: str
        Component description
    prop_reorder_exceptions: list | None
        Component names (or "ALL") whose props keep their metadata order
    ignored_props: tuple
        Prop names excluded from the generated docstring
    Returns
    -------
    str
        Dash component docstring
    """
    # Ensure props are ordered with children first
    props = (
        props
        if (
            prop_reorder_exceptions is not None
            and component_name in prop_reorder_exceptions
        )
        or (prop_reorder_exceptions is not None and "ALL" in prop_reorder_exceptions)
        else reorder_props(props)
    )

    # "A Button component." vs "An Input component."
    n = "n" if component_name[0].lower() in "aeiou" else ""
    args = "\n".join(
        create_prop_docstring(
            prop_name=p,
            type_object=prop["type"] if "type" in prop else prop["flowType"],
            required=prop["required"],
            description=prop["description"],
            default=prop.get("defaultValue"),
            indent_num=0,
            is_flow_type="flowType" in prop and "type" not in prop,
        )
        for p, prop in filter_props(props, ignored_props).items()
    )

    return (
        f"A{n} {component_name} component.\n{description}\n\nKeyword arguments:\n{args}"
    )
+
+
def prohibit_events(props):
    """Raise if the removed dashEvents/fireEvents props are present.

    Events were removed from Dash; components must expose properties
    (e.g. `n_clicks`) instead.

    Parameters
    ----------
    props: dict
        Dictionary with {propName: propMetadata} structure

    Raises
    ------
    NonExistentEventException
        If either legacy event prop is found.
    """
    legacy_keys = ("dashEvents", "fireEvents")
    if any(key in props for key in legacy_keys):
        raise NonExistentEventException(
            "Events are no longer supported by dash. Use properties instead, "
            "eg `n_clicks` instead of a `click` event."
        )
+
+
def parse_wildcards(props):
    """Pull out the wildcard attributes from the Component props.

    Parameters
    ----------
    props: dict
        Dictionary with {propName: propMetadata} structure

    Returns
    -------
    list
        Valid wildcard prefixes ("data-", "aria-") declared in *props*.
    """
    # Strip the trailing "*" from each declared wildcard prop.
    return [attr[:-1] for attr in ("data-*", "aria-*") if attr in props]
+
+
def reorder_props(props):
    """Order props as "children" first, then "id", then the rest sorted
    by prop name, following dash convention.

    Does not mutate *props*; returns a new OrderedDict holding the same
    metadata values.
    """
    ordered = OrderedDict()
    # Reserve the leading slots; the real metadata values are written in
    # the sorted pass below, overwriting these placeholders.
    if "children" in props:
        ordered["children"] = ""
    if "id" in props:
        ordered["id"] = ""
    for key, value in sorted(props.items()):
        ordered[key] = value
    return ordered
+
+
def filter_props(props, ignored_props=tuple()):
    """Filter props from the Component arguments to exclude:
    - Those listed in ``ignored_props``
    - Those without a "type" or a "flowType" field
    - Those with arg.type.name in {'func', 'symbol', 'instanceOf'}
      (and their Flow "signature" equivalents)
    Parameters
    ----------
    props: dict
        Dictionary with {propName: propMetadata} structure
    ignored_props: tuple
        Prop names to drop unconditionally
    Returns
    -------
    dict
        Filtered dictionary with {propName: propMetadata} structure
    Examples
    --------
    ```python
    prop_args = {
        'prop1': {
            'type': {'name': 'bool'},
            'required': False,
            'description': 'A description',
            'flowType': {},
            'defaultValue': {'value': 'false', 'computed': False},
        },
        'prop2': {'description': 'A prop without a type'},
        'prop3': {
            'type': {'name': 'func'},
            'description': 'A function prop',
        },
    }
    # filtered_prop_args is now
    # {
    #     'prop1': {
    #         'type': {'name': 'bool'},
    #         'required': False,
    #         'description': 'A description',
    #         'flowType': {},
    #         'defaultValue': {'value': 'false', 'computed': False},
    #     },
    # }
    filtered_prop_args = filter_props(prop_args)
    ```
    """
    # Deep-copy so the caller's metadata is never mutated.
    filtered_props = copy.deepcopy(props)

    for arg_name, arg in list(filtered_props.items()):
        if arg_name in ignored_props or ("type" not in arg and "flowType" not in arg):
            filtered_props.pop(arg_name)
            continue

        # Filter out functions and instances --
        # these cannot be passed from Python
        if "type" in arg:  # These come from PropTypes
            arg_type = arg["type"]["name"]
            if arg_type in {"func", "symbol", "instanceOf"}:
                filtered_props.pop(arg_name)
        elif "flowType" in arg:  # These come from Flow & handled differently
            arg_type_name = arg["flowType"]["name"]
            if arg_type_name == "signature":
                # This does the same as the PropTypes filter above, but "func"
                # is under "type" if "name" is "signature" vs just in "name"
                if "type" not in arg["flowType"] or arg["flowType"]["type"] != "object":
                    filtered_props.pop(arg_name)
        else:
            # Unreachable given the membership check above; kept as a guard.
            # Fix: the original raised a bare ValueError with no context,
            # which made failures impossible to diagnose.
            raise ValueError(
                f"Prop `{arg_name}` has neither 'type' nor 'flowType': {arg!r}"
            )

    return filtered_props
+
+
def fix_keywords(txt):
    """Replace the JavaScript literals true/false/null with the Python
    keywords True/False/None in docstring text.

    Only whole words are replaced. The previous implementation used plain
    ``str.replace`` and therefore corrupted substrings inside larger words
    (e.g. "nullable" -> "Noneable", "construed" -> "consTrued").
    """
    import re  # local import: only needed during docstring generation

    fix_word = {"true": "True", "false": "False", "null": "None"}
    return re.sub(
        r"\b(true|false|null)\b", lambda m: fix_word[m.group(0)], txt
    )
+
+
# pylint: disable=too-many-arguments
# pylint: disable=too-many-locals
def create_prop_docstring(
    prop_name,
    type_object,
    required,
    description,
    default,
    indent_num,
    is_flow_type=False,
):
    """Create the Dash component prop docstring.
    Parameters
    ----------
    prop_name: str
        Name of the Dash component prop
    type_object: dict
        react-docgen-generated prop type dictionary
    required: bool
        Component is required?
    description: str
        Dash component description
    default: dict
        Either None if a default value is not defined, or
        dict containing the key 'value' that defines a
        default value for the prop
    indent_num: int
        Number of indents to use for the context block
        (creates 2 spaces for every indent)
    is_flow_type: bool
        Does the prop use Flow types? Otherwise, uses PropTypes
    Returns
    -------
    str
        Dash component prop docstring
    """
    py_type_name = js_to_py_type(
        type_object=type_object, is_flow_type=is_flow_type, indent_num=indent_num
    )
    indent_spacing = "  " * indent_num

    default = default["value"] if default else ""
    default = fix_keywords(default)

    # Annotate the prop as required / optional / defaulted.
    is_required = "optional"
    if required:
        is_required = "required"
    elif default and default not in ["None", "{}", "[]"]:
        is_required = "default " + default.replace("\n", "")

    # formats description
    period = "." if description else ""
    description = description.strip().strip(".").replace('"', r"\"") + period
    desc_indent = indent_spacing + "    "
    description = fill(
        description,
        initial_indent=desc_indent,
        subsequent_indent=desc_indent,
        break_long_words=False,
        break_on_hyphens=False,
    )
    description = f"\n{description}" if description else ""
    colon = ":" if description else ""
    description = fix_keywords(description)

    # Multi-line type names only come from nested dict (shape) types.
    if "\n" in py_type_name:
        # corrects the type
        dict_or_list = "list of dicts" if py_type_name.startswith("list") else "dict"

        # format and rewrite the intro to the nested dicts
        intro1, intro2, dict_descr = py_type_name.partition("with keys:")
        intro = f"`{prop_name}` is a {intro1}{intro2}"
        intro = fill(
            intro,
            initial_indent=desc_indent,
            subsequent_indent=desc_indent,
            break_long_words=False,
            break_on_hyphens=False,
        )

        # captures optional nested dict description and puts the "or" condition on a new line
        if "| dict with keys:" in dict_descr:
            dict_part1, dict_part2 = dict_descr.split(" |", 1)
            dict_part2 = "".join([desc_indent, "Or", dict_part2])
            dict_descr = f"{dict_part1}\n\n  {dict_part2}"

        # ensures indent is correct if there is a second nested list of dicts
        current_indent = dict_descr.lstrip("\n").find("-")
        if current_indent == len(indent_spacing):
            dict_descr = "".join(
                "\n\n    " + line for line in dict_descr.splitlines() if line != ""
            )

        return (
            f"\n{indent_spacing}- {prop_name} ({dict_or_list}; {is_required}){colon}"
            f"{description}"
            f"\n\n{intro}{dict_descr}"
        )
    tn = f"{py_type_name}; " if py_type_name else ""
    return f"\n{indent_spacing}- {prop_name} ({tn}{is_required}){colon}{description}"
+
+
def map_js_to_py_types_prop_types(type_object, indent_num):
    """Mapping from the PropTypes js type object to the Python type.

    Returns a dict of zero-argument handlers keyed by PropTypes type name;
    each handler renders the human-readable Python type description used
    in docstrings.
    """

    def shape_or_exact():
        # Recursively document each key of a shape/exact object, two
        # indent levels deeper than the parent prop.
        return "dict with keys:\n" + "\n".join(
            create_prop_docstring(
                prop_name=prop_name,
                type_object=prop,
                required=prop["required"],
                description=prop.get("description", ""),
                default=prop.get("defaultValue"),
                indent_num=indent_num + 2,
            )
            for prop_name, prop in type_object["value"].items()
        )

    def array_of():
        # Pluralize the element type ("list of strings", "list of dicts ...").
        inner = js_to_py_type(type_object["value"])
        if inner:
            return "list of " + (
                inner + "s"
                if inner.split(" ")[0] != "dict"
                else inner.replace("dict", "dicts", 1)
            )
        return "list"

    def tuple_of():
        elements = [js_to_py_type(element) for element in type_object["elements"]]
        return f"list of {len(elements)} elements: [{', '.join(elements)}]"

    return dict(
        array=lambda: "list",
        bool=lambda: "boolean",
        number=lambda: "number",
        string=lambda: "string",
        object=lambda: "dict",
        any=lambda: "boolean | number | string | dict | list",
        element=lambda: "dash component",
        node=lambda: "a list of or a singular dash component, string or number",
        # React's PropTypes.oneOf
        enum=lambda: (
            "a value equal to: "
            + ", ".join(str(t["value"]) for t in type_object["value"])
        ),
        # React's PropTypes.oneOfType
        union=lambda: " | ".join(
            js_to_py_type(subType)
            for subType in type_object["value"]
            if js_to_py_type(subType) != ""
        ),
        # React's PropTypes.arrayOf
        arrayOf=array_of,
        # React's PropTypes.objectOf
        objectOf=lambda: (
            "dict with strings as keys and values of type "
            + js_to_py_type(type_object["value"])
        ),
        # React's PropTypes.shape
        shape=shape_or_exact,
        # React's PropTypes.exact
        exact=shape_or_exact,
        tuple=tuple_of,
    )
+
+
def map_js_to_py_types_flow_types(type_object):
    """Mapping from the Flow js types to the Python type.

    Same contract as ``map_js_to_py_types_prop_types`` but for Flow-typed
    components; the "signature" handler additionally takes the current
    indent level.
    """
    return dict(
        array=lambda: "list",
        boolean=lambda: "boolean",
        number=lambda: "number",
        string=lambda: "string",
        Object=lambda: "dict",
        any=lambda: "bool | number | str | dict | list",
        Element=lambda: "dash component",
        Node=lambda: "a list of or a singular dash component, string or number",
        # React's PropTypes.oneOfType
        union=lambda: " | ".join(
            js_to_py_type(subType)
            for subType in type_object["elements"]
            if js_to_py_type(subType) != ""
        ),
        # Flow's Array type
        Array=lambda: "list"
        + (
            f' of {js_to_py_type(type_object["elements"][0])}s'
            if js_to_py_type(type_object["elements"][0]) != ""
            else ""
        ),
        # React's PropTypes.shape
        signature=lambda indent_num: (
            "dict with keys:\n"
            + "\n".join(
                create_prop_docstring(
                    prop_name=prop["key"],
                    type_object=prop["value"],
                    required=prop["value"]["required"],
                    description=prop["value"].get("description", ""),
                    default=prop.get("defaultValue"),
                    indent_num=indent_num + 2,
                    is_flow_type=True,
                )
                for prop in type_object["signature"]["properties"]
            )
        ),
    )
+
+
def js_to_py_type(type_object, is_flow_type=False, indent_num=0):
    """Convert JS types to Python types for the component definition.

    Parameters
    ----------
    type_object: dict
        react-docgen-generated prop type dictionary
    is_flow_type: bool
        Does the prop use Flow types? Otherwise, uses PropTypes
    indent_num: int
        Number of indents to use for the docstring for the prop

    Returns
    -------
    str
        Python type string ("" when the type cannot be represented)
    """
    js_type_name = type_object["name"]
    if is_flow_type:
        js_to_py_types = map_js_to_py_types_flow_types(type_object=type_object)
    else:
        js_to_py_types = map_js_to_py_types_prop_types(
            type_object=type_object, indent_num=indent_num
        )

    # Computed values and function types have no Python representation.
    if type_object.get("computed") or type_object.get("type", "") == "function":
        return ""

    handler = js_to_py_types.get(js_type_name)
    if handler is None:
        return ""
    if js_type_name == "signature":
        # Flow object with a signature: the handler needs the indent level.
        return handler(indent_num)  # type: ignore[reportCallIssue]
    return handler()  # type: ignore[reportCallIssue]
diff --git a/venv/lib/python3.8/site-packages/dash/development/_py_prop_typing.py b/venv/lib/python3.8/site-packages/dash/development/_py_prop_typing.py
new file mode 100644
index 0000000..96b2053
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/_py_prop_typing.py
@@ -0,0 +1,197 @@
+import collections
+import json
+import string
+import textwrap
+import importlib
+
+from .._utils import pascal_case
+
+
# Registry of generated TypedDict shape sources, filled by generate_shape:
# {component_name: {ShapeName: rendered TypedDict source}}.
shapes = {}
# Template used to render one TypedDict declaration into a component file.
shape_template = """{name} = TypedDict(
    "{name}",
    {values}
)
"""
# Two-level defaultdict of extra import lines collected during generation.
custom_imports = collections.defaultdict(lambda: collections.defaultdict(list))
+
+
def _get_custom(module_name, prop, default):
    """Look up attribute *prop* on the module named *module_name*.

    Returns *default* when no module name is given, when the module cannot
    be imported, or when the attribute is missing.
    """
    if not module_name:
        return default
    try:
        loaded = importlib.import_module(module_name)
    except ImportError:
        return default
    return getattr(loaded, prop, default)
+
+
def get_custom_imports(module_name):
    # Extra import lines to inject into generated component files, keyed by
    # component name (or "*" for all components).
    return _get_custom(module_name, "custom_imports", {})


def get_custom_props(module_name):
    # Per-component prop-typing overrides supplied by the typing module.
    return _get_custom(module_name, "custom_props", {})


def get_custom_ignore(module_name):
    # Prop names to type as typing.Any; "style" is ignored by default.
    return _get_custom(module_name, "ignore_props", ["style"])
+
+
def _clean_key(key):
    """Sanitize a prop key for use in a generated identifier.

    Every character outside [A-Za-z_] (digits included) is replaced with
    an underscore, preserving the original behavior.
    """
    # Hoist the allowed-character set out of the loop and build the result
    # in one pass instead of quadratic per-character string concatenation.
    allowed = set(string.ascii_letters + "_")
    return "".join(ch if ch in allowed else "_" for ch in key)
+
+
def generate_any(*_):
    # Fallback handler for untypable props: everything maps to typing.Any.
    return "typing.Any"
+
+
def generate_shape(type_info, component_name: str, prop_name: str):
    """Render a TypedDict for a shape/exact prop and return its name.

    The generated class source is stored in the module-level ``shapes``
    registry (keyed by component, then shape name) for later emission into
    the component file; the returned value is the quoted shape name for
    use as a forward reference in annotations.
    """
    props = []
    name = pascal_case(prop_name)

    for prop_key, prop_type in type_info["value"].items():
        typed = get_prop_typing(
            prop_type["name"], component_name, f"{prop_name}_{prop_key}", prop_type
        )
        # Optional members are wrapped in NotRequired (PEP 655 semantics).
        if not prop_type.get("required"):
            props.append(f'        "{prop_key}": NotRequired[{typed}]')
        else:
            props.append(f'        "{prop_key}": {typed}')

    shapes.setdefault(component_name, {})
    shapes[component_name][name] = textwrap.indent(
        shape_template.format(
            name=name, values="    {\n" + ",\n".join(props) + "\n    }"
        ),
        "    ",
    )

    return f'"{name}"'
+
+
def generate_union(type_info, component_name: str, prop_name: str):
    """Render a oneOfType prop as typing.Union, deduplicating members
    while preserving first-seen order."""
    member_types = []
    for member in type_info["value"]:
        rendered = get_prop_typing(member["name"], component_name, prop_name, member)
        if rendered not in member_types:
            member_types.append(rendered)
    joined = ", ".join(member_types)
    return f"typing.Union[{joined}]"
+
+
def generate_tuple(
    type_info,
    component_name: str,
    prop_name: str,
):
    """Render a fixed-length tuple prop as typing.Tuple of its element types."""
    els = type_info.get("elements")
    elements = ", ".join(
        get_prop_typing(x.get("name"), component_name, prop_name, x) for x in els
    )
    return f"typing.Tuple[{elements}]"
+
+
def generate_array_of(
    type_info,
    component_name: str,
    prop_name: str,
):
    """Render a PropTypes.arrayOf as typing.Sequence of the element type."""
    typed = get_prop_typing(
        type_info["value"]["name"], component_name, prop_name, type_info["value"]
    )
    return f"typing.Sequence[{typed}]"
+
+
def generate_object_of(type_info, component_name: str, prop_name: str):
    """Render a PropTypes.objectOf as a Dict keyed by str/float/int."""
    typed = get_prop_typing(
        type_info["value"]["name"], component_name, prop_name, type_info["value"]
    )
    return f"typing.Dict[typing.Union[str, float, int], {typed}]"
+
+
def generate_type(typename):
    """Return a handler that always yields the fixed type string *typename*,
    ignoring any arguments it is called with."""

    def _fixed_type(*_args):
        return typename

    return _fixed_type
+
+
def _get_literal_value(value):
    """Render one enum value as Python source text for a Literal.

    String inputs are raw JS snippets: single quotes are swapped for
    double quotes and the result is parsed as JSON before rendering.
    """
    parsed = (
        json.loads(value.replace("'", '"')) if isinstance(value, str) else value
    )

    if parsed is None:
        return "None"
    if parsed is True or parsed is False:
        # json.dumps would give lowercase true/false; Python wants True/False.
        return str(parsed)
    return json.dumps(parsed)
+
+
def generate_enum(type_info, *_):
    """Render a PropTypes.oneOf as a Literal of its allowed values."""
    # Falsy metadata entries are skipped defensively.
    values = [_get_literal_value(v["value"]) for v in type_info["value"] if v]
    return f"Literal[{', '.join(values)}]"
+
+
def generate_literal(type_info, *_):
    """Render a single-value literal type as a Literal annotation."""
    rendered = json.dumps(type_info["value"])
    return f"Literal[{rendered}]"
+
+
def _get_custom_prop(custom_props, component_name, prop_name):
    """Fetch a prop override for a component, falling back to "*" when the
    component has no (truthy) entry of its own."""
    per_component = custom_props.get(component_name)
    if not per_component:
        per_component = custom_props.get("*", {})
    return per_component.get(prop_name)
+
+
def get_prop_typing(
    type_name: str,
    component_name: str,
    prop_name: str,
    type_info,
    custom_props=None,
    custom_ignore=None,
):
    """Resolve the Python typing annotation string for one prop.

    Resolution order: the special-cased ``id`` prop, then any custom
    override handler, then the ignore list (typed as Any), then the
    PROP_TYPING dispatch table (unknown type names fall back to Any).
    """
    if prop_name == "id":
        # Id is always the same either a string or a dict for pattern matching.
        return "typing.Union[str, dict]"

    if custom_props:
        special = _get_custom_prop(custom_props, component_name, prop_name)
        if special:
            return special(type_info, component_name, prop_name)

    if custom_ignore and prop_name in custom_ignore:
        return "typing.Any"

    prop_type = PROP_TYPING.get(type_name, generate_any)(
        type_info, component_name, prop_name
    )
    return prop_type
+
+
# Dispatch table mapping react-docgen type names to annotation generators.
# Each value is called as handler(type_info, component_name, prop_name) and
# returns the annotation source string.
PROP_TYPING = {
    "array": generate_type("typing.Sequence"),
    "arrayOf": generate_array_of,
    "object": generate_type("dict"),
    "shape": generate_shape,
    "exact": generate_shape,
    "string": generate_type("str"),
    "bool": generate_type("bool"),
    "number": generate_type("NumberType"),
    "node": generate_type("ComponentType"),
    "func": generate_any,
    "element": generate_type("Component"),
    "union": generate_union,
    "any": generate_any,
    "custom": generate_any,
    "enum": generate_enum,
    "objectOf": generate_object_of,
    "tuple": generate_tuple,
    "literal": generate_literal,
}
diff --git a/venv/lib/python3.8/site-packages/dash/development/_r_components_generation.py b/venv/lib/python3.8/site-packages/dash/development/_r_components_generation.py
new file mode 100644
index 0000000..e279250
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/_r_components_generation.py
@@ -0,0 +1,1005 @@
+# pylint: disable=consider-using-f-string
+# type: ignore
+import os
+import sys
+import shutil
+import importlib
+import textwrap
+import re
+import warnings
+
+from ._all_keywords import r_keywords
+from ._py_components_generation import reorder_props
+
+
# Declaring longer string templates as globals to improve
# readability, make method logic clearer to anyone inspecting
# code below

# Template for one exported R constructor function; filled in by
# generate_class_string().  NULL-valued props are dropped before the
# component list is assembled.
r_component_string = """#' @export
{funcname} <- function({default_argtext}{wildcards}) {{
    {wildcard_declaration}
    props <- list({default_paramtext}{wildcards})
    if (length(props) > 0) {{
        props <- props[!vapply(props, is.null, logical(1))]
    }}
    component <- list(
        props = props,
        type = '{name}',
        namespace = '{project_shortname}',
        propNames = c({prop_names}{wildcard_names}),
        package = '{package_name}'
    )

    structure(component, class = c('dash_component', 'list'))
}}
""" # noqa:E501

# the following strings represent all the elements in an object
# of the html_dependency class, which will be propagated by
# iterating over _js_dist in __init__.py

# Opens the generated .<pkg>_js_metadata() R function.
frame_open_template = """.{rpkgname}_js_metadata <- function() {{
deps_metadata <- list("""

# One html_dependency entry; used when a package has several dependencies.
frame_element_template = """`{dep_name}` = structure(list(name = "{dep_name}",
version = "{project_ver}", src = list(href = NULL,
file = "deps"), meta = NULL,
script = {script_name},
stylesheet = {css_name}, head = NULL, attachment = NULL, package = "{rpkgname}",
all_files = FALSE{async_or_dynamic}), class = "html_dependency")""" # noqa:E501

# The single html_dependency entry; used when a package has exactly one.
frame_body_template = """`{project_shortname}` = structure(list(name = "{project_shortname}",
version = "{project_ver}", src = list(href = NULL,
file = "deps"), meta = NULL,
script = {script_name},
stylesheet = {css_name}, head = NULL, attachment = NULL, package = "{rpkgname}",
all_files = FALSE{async_or_dynamic}), class = "html_dependency")""" # noqa:E501

# Closes the generated metadata function.
frame_close_template = """)
return(deps_metadata)
}
"""

# Skeleton of a component help page (.Rd); filled in by write_help_file().
help_string = """% Auto-generated: do not edit by hand
\\name{{{funcname}}}

\\alias{{{funcname}}}

\\title{{{name} component}}

\\description{{
{description}
}}

\\usage{{
{funcname}({default_argtext})
}}

\\arguments{{
{item_text}
}}

\\value{{{value_text}}}

"""

# Skeleton of the package DESCRIPTION file; filled in by generate_rpkg().
description_template = """Package: {package_name}
Title: {package_title}
Version: {package_version}
Description: {package_description}
Depends: R (>= 3.0.2){package_depends}
Imports: {package_imports}
Suggests: {package_suggests}{package_rauthors}
License: {package_license}{package_copyright}
URL: {package_url}
BugReports: {package_issues}
Encoding: UTF-8
LazyData: true{vignette_builder}
KeepSource: true
"""

# Contents of .Rbuildignore -- JS/Python build artifacts that must not be
# bundled into the built R package.
rbuild_ignore_string = r"""# ignore JS config files/folders
node_modules/
coverage/
src/
lib/
.babelrc
.builderrc
.eslintrc
.npmignore
.editorconfig
.eslintignore
.prettierrc
.circleci
.github

# demo folder has special meaning in R
# this should hopefully make it still
# allow for the possibility to make R demos
demo/.*\.js
demo/.*\.html
demo/.*\.css

# ignore Python files/folders
setup.py
usage.py
setup.py
requirements.txt
MANIFEST.in
CHANGELOG.md
test/
# CRAN has weird LICENSE requirements
LICENSE.txt
^.*\.Rproj$
^\.Rproj\.user$
"""

# Skeleton of the package-level help page (<pkg>-package.Rd).
pkghelp_stub = """% Auto-generated: do not edit by hand
\\docType{{package}}
\\name{{{package_name}-package}}
\\alias{{{package_name}}}
\\title{{{pkg_help_title}}}
\\description{{
{pkg_help_description}
}}
\\author{{
\\strong{{Maintainer}}: {maintainer}
}}
"""

# R helper injected into internal.R when any component declares wildcard
# ("data-*" / "aria-*") props; validates prop names passed through `...`.
wildcard_helper = """
dash_assert_valid_wildcards <- function (attrib = list("data", "aria"), ...)
{
    args <- list(...)
    validation_results <- lapply(names(args), function(x) {
        grepl(paste0("^(", paste0(attrib, collapse="|"), ")-[a-zA-Z0-9_-]+$"),
            x)
    })
    if (FALSE %in% validation_results) {
        stop(sprintf("The following props are not valid in this component: '%s'",
            paste(names(args)[grepl(FALSE, unlist(validation_results))],
                collapse = ", ")), call. = FALSE)
    }
    else {
        return(args)
    }
}
""" # noqa:E501

# Inserted into a constructor body to validate wildcard props at call time.
wildcard_template = """
    wildcard_names = names(dash_assert_valid_wildcards(attrib = list({}), ...))
"""

# Help-page entry documenting the `...` wildcard argument.
wildcard_help_template = """


\\item{{...}}{{wildcards allowed have the form: `{}`}}
"""
+
+
# pylint: disable=R0914
def generate_class_string(name, props, project_shortname, prefix):
    """Build the source of one exported R constructor from component props."""
    # R package names are conventionally camelCase
    package_name = snake_case_to_camel_case(project_shortname)

    # Ensure props are ordered with children first
    props = reorder_props(props=props)
    prop_keys = list(props.keys())

    wildcards = ""
    wildcard_declaration = ""
    wildcard_names = ""

    # Wildcard props ("data-*"/"aria-*") surface as a variadic `...` argument
    # plus a runtime validation call in the generated constructor.
    if any(key.endswith("-*") for key in prop_keys):
        wildcards = ", ..."
        wildcard_declaration = wildcard_template.format(
            get_wildcards_r(prop_keys).replace("-*", "")
        )
        wildcard_names = ", wildcard_names"

    # All property names other than wildcards and setProps
    prop_names = ", ".join(
        "'{}'".format(p) for p in prop_keys if "*" not in p and p not in ["setProps"]
    )

    # Drop props that cannot (or should not) be R function arguments;
    # R keywords additionally trigger a warning.
    exposed = []
    for key in prop_keys:
        if key.endswith("-*") or key == "setProps":
            continue
        if key in r_keywords:
            warnings.warn(
                (
                    'WARNING: prop "{}" in component "{}" is an R keyword'
                    " - REMOVED FROM THE R COMPONENT"
                ).format(key, name)
            )
            continue
        exposed.append(key)

    default_argtext = ", ".join("{}=NULL".format(p) for p in exposed)
    # pylint: disable=C0301
    default_paramtext = ", ".join("{0}={0}".format(p) for p in exposed)

    return r_component_string.format(
        funcname=format_fn_name(prefix, name),
        name=name,
        default_argtext=default_argtext,
        wildcards=wildcards,
        wildcard_declaration=wildcard_declaration,
        default_paramtext=default_paramtext,
        project_shortname=project_shortname,
        prop_names=prop_names,
        wildcard_names=wildcard_names,
        package_name=package_name,
    )
+
+
# pylint: disable=R0914
def generate_js_metadata(pkg_data, project_shortname):
    """Dynamically generate R function to supply JavaScript and CSS dependency
    information required by the dash package for R.

    Parameters
    ----------
    pkg_data = package.json contents (only "version" is read here)
    project_shortname = component library name, in snake case

    Returns
    -------
    function_string = complete R function code to provide component features
    """
    # make sure the module we're building is available to Python,
    # even if it hasn't been installed yet
    sys.path.insert(0, os.getcwd())
    mod = importlib.import_module(project_shortname)

    alldist = getattr(mod, "_js_dist", []) + getattr(mod, "_css_dist", [])

    project_ver = pkg_data.get("version")

    rpkgname = snake_case_to_camel_case(project_shortname)

    # since _js_dist may suggest more than one dependency, need
    # a way to iterate over all dependencies for a given set.
    # here we define an opening, element, and closing string --
    # if the total number of dependencies > 1, we can concatenate
    # them and write a list object in R with multiple elements
    function_frame_open = frame_open_template.format(rpkgname=rpkgname)

    # initialized to a string: it previously defaulted to a list, which
    # crashed the str.join() below whenever a package had no dependencies
    function_frame_body = ""

    if len(alldist) > 1:
        function_frame = []
        for curr_dep in alldist:
            rpp = curr_dep.get("relative_package_path", "")
            if not rpp:
                continue

            async_or_dynamic = get_async_type(curr_dep)

            # dash core bundles keep their own name; everything else is
            # attributed to the generated package
            if "dash_" in rpp:
                dep_name = rpp.split(".")[0]
            else:
                dep_name = project_shortname

            if "css" in rpp:
                css_name = "'{}'".format(rpp)
                script_name = "NULL"
            else:
                script_name = "'{}'".format(rpp)
                css_name = "NULL"

            function_frame.append(
                frame_element_template.format(
                    dep_name=dep_name,
                    project_ver=project_ver,
                    rpkgname=rpkgname,
                    project_shortname=project_shortname,
                    script_name=script_name,
                    css_name=css_name,
                    async_or_dynamic=async_or_dynamic,
                )
            )
        function_frame_body = ",\n".join(function_frame)
    elif len(alldist) == 1:
        dep = alldist[0]
        rpp = dep["relative_package_path"]

        async_or_dynamic = get_async_type(dep)

        if "css" in rpp:
            css_name = "'{}'".format(rpp)
            script_name = "NULL"
        else:
            script_name = "'{}'".format(rpp)
            css_name = "NULL"

        function_frame_body = frame_body_template.format(
            project_shortname=project_shortname,
            project_ver=project_ver,
            rpkgname=rpkgname,
            script_name=script_name,
            css_name=css_name,
            async_or_dynamic=async_or_dynamic,
        )

    return "".join([function_frame_open, function_frame_body, frame_close_template])
+
+
def get_async_type(dep):
    """Return the formatted ``async``/``dynamic`` flag fragment for a
    dependency dict, e.g. ``", async = TRUE"``, or ``""`` when neither flag
    is present.  Booleans render as TRUE/FALSE; strings are lowercased and
    quoted.  A dependency may carry async or dynamic, but never both.
    """
    fragment = ""
    for key, value in dep.items():
        if key not in ("async", "dynamic"):
            continue
        if isinstance(value, bool):
            rendered = str(value).upper()
        else:
            rendered = "'{}'".format(value.lower())
        fragment = ", {} = {}".format(key, rendered)
    return fragment
+
+
def wrap(tag, code):
    """Wrap *code* in an R-documentation (LaTeX-like) tag; an empty tag
    returns the code unchanged."""
    return code if tag == "" else "\\{}{{\n{}}}".format(tag, code)
+
+
def write_help_file(name, props, description, prefix, rpkg_data):
    """Write R documentation file (.Rd) given component name and properties.

    Parameters
    ----------
    name = the name of the Dash component for which a help file is generated
    props = the properties of the component
    description = the component's description, inserted into help file header
    prefix = the DashR library prefix (optional, can be a blank string)
    rpkg_data = package metadata (optional)

    Returns
    -------
    writes an R help file to the man directory for the generated R package
    """
    funcname = format_fn_name(prefix, name)
    file_name = funcname + ".Rd"

    wildcards = ""
    default_argtext = ""
    item_text = ""
    accepted_wildcards = ""

    # the return value of all Dash components should be the same,
    # in an abstract sense -- they produce a list
    value_text = "named list of JSON elements corresponding to React.js properties and their values" # noqa:E501

    prop_keys = list(props.keys())

    # wildcard props are documented collectively via the `...` argument
    if any(key.endswith("-*") for key in prop_keys):
        accepted_wildcards = get_wildcards_r(prop_keys)
        wildcards = ", ..."

    # Filter props to remove those we don't want to expose
    for item in prop_keys[:]:
        if item.endswith("-*") or item in r_keywords or item == "setProps":
            prop_keys.remove(item)

    default_argtext += ", ".join("{}=NULL".format(p) for p in prop_keys)

    # one \item{} entry per exposed prop: name, R type, then description
    item_text += "\n\n".join(
        "\\item{{{}}}{{{}{}}}".format(
            p, print_r_type(props[p]["type"]), props[p]["description"]
        )
        for p in prop_keys
    )

    # auto-replace any unescaped backslashes for compatibility with R docs
    description = re.sub(r"(?<!\\)%", "\\%", description)
    item_text = re.sub(r"(?<!\\)%", "\\%", item_text)

    # scrub examples which begin with **Example Usage**, as these should be
    # provided as R code within dash-info.yaml
    if "**Example Usage**" in description:
        description = description.split("**Example Usage**")[0].rstrip()

    if wildcards == ", ...":
        default_argtext += wildcards
        item_text += wildcard_help_template.format(accepted_wildcards)

    # in R, the online help viewer does not properly wrap lines for
    # the usage string -- we will hard wrap at 60 characters using
    # textwrap.fill, starting from the beginning of the usage string

    file_path = os.path.join("man", file_name)
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(
            help_string.format(
                funcname=funcname,
                name=name,
                default_argtext=textwrap.fill(
                    default_argtext, width=60, break_long_words=False
                ),
                item_text=item_text,
                value_text=value_text,
                description=description.replace("\n", " "),
            )
        )
    # hand-written examples from dash-info.yaml (matched by function name)
    # are appended to the generated page, wrapped in \dontrun{} if requested
    if rpkg_data is not None and "r_examples" in rpkg_data:
        ex = rpkg_data.get("r_examples")
        the_ex = ([e for e in ex if e.get("name") == funcname] or [None])[0]
        result = ""
        if the_ex and "code" in the_ex.keys():
            result += wrap(
                "examples",
                wrap("dontrun" if the_ex.get("dontrun") else "", the_ex["code"]),
            )
            with open(file_path, "a+", encoding="utf-8") as fa:
                fa.write(result + "\n")
+
+
# pylint: disable=too-many-arguments
def write_class_file(
    name,
    props,
    description,
    project_shortname,
    prefix=None,
    rpkg_data=None,
):
    """Write the generated R constructor (R/<funcname>.R) and its help page."""
    props = reorder_props(props=props)

    # generate the R help pages for each of the Dash components that we
    # are transpiling -- this is done to avoid using Roxygen2 syntax,
    # we may eventually be able to generate similar documentation using
    # doxygen and an R plugin, but for now we'll just do it on our own
    # from within Python
    write_help_file(name, props, description, prefix, rpkg_data)

    output_name = format_fn_name(prefix, name) + ".R"
    output_path = os.path.join("R", output_name)

    # header marker followed by the generated constructor body
    contents = "# AUTO GENERATED FILE - DO NOT EDIT\n\n" + generate_class_string(
        name, props, project_shortname, prefix
    )

    with open(output_path, "w", encoding="utf-8") as f:
        f.write(contents)

    print("Generated {}".format(output_name))
+
+
def write_js_metadata(pkg_data, project_shortname, has_wildcards):
    """Write an internal (not exported) R function to return all JS
    dependencies as required by dash, then copy the bundled assets.

    Parameters
    ----------
    pkg_data = package.json contents
    project_shortname = hyphenated string, e.g. dash-html-components
    has_wildcards = whether the wildcard validation helper must be included

    Returns
    -------
    None -- writes R/internal.R and populates inst/deps/
    """
    function_string = generate_js_metadata(
        pkg_data=pkg_data, project_shortname=project_shortname
    )

    # the R source directory for the package won't exist on first call;
    # exist_ok avoids the separate existence check
    os.makedirs("R", exist_ok=True)

    file_path = os.path.join("R", "internal.R")
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(function_string)
        if has_wildcards:
            f.write(wildcard_helper)

    # now copy over all JS dependencies from the (Python) components dir,
    # rebuilding inst/deps from scratch each time
    if os.path.exists("inst/deps"):
        shutil.rmtree("inst/deps")

    os.makedirs("inst/deps")

    for rel_dirname, _, filenames in os.walk(project_shortname):
        # mirror the source directory layout under inst/deps/
        # (single join -- the previous nested os.path.join was redundant)
        target_dirname = os.path.join(
            "inst/deps/", os.path.relpath(rel_dirname, project_shortname)
        )

        for filename in filenames:
            # Python sources and metadata have no place in the R bundle
            if os.path.splitext(filename)[1] in (".py", ".pyc", ".json"):
                continue

            os.makedirs(target_dirname, exist_ok=True)
            shutil.copy(os.path.join(rel_dirname, filename), target_dirname)
+
+
# pylint: disable=R0914, R0913, R0912, R0915
def generate_rpkg(
    pkg_data,
    rpkg_data,
    project_shortname,
    export_string,
    package_depends,
    package_imports,
    package_suggests,
    has_wildcards,
):
    """Generate documents for R package creation.

    Parameters
    ----------
    pkg_data = package.json contents
    rpkg_data = R-specific metadata (e.g. from dash-info.yaml); may be None
    project_shortname = component library name, in snake case
    export_string = NAMESPACE export() directives
    package_depends / package_imports / package_suggests = comma-separated
        R package lists for the DESCRIPTION file
    has_wildcards = whether any component declares wildcard props

    Returns
    -------
    None -- writes NAMESPACE, .Rbuildignore, DESCRIPTION and help stubs
    """
    # Leverage package.json to import specifics which are also applicable
    # to R package that we're generating here, use .get in case the key
    # does not exist in package.json

    package_name = snake_case_to_camel_case(project_shortname)
    package_copyright = ""
    package_rauthors = ""
    lib_name = pkg_data.get("name")

    # Start from the package.json description and let rpkg_data override.
    # (Previously package_title/package_description were left unbound when
    # rpkg_data was supplied without the pkg_help_* keys, raising a
    # NameError when formatting the DESCRIPTION template below.)
    package_title = pkg_data.get("description", "")
    package_description = pkg_data.get("description", "")

    if rpkg_data is not None:
        if rpkg_data.get("pkg_help_title"):
            package_title = rpkg_data["pkg_help_title"]
        if rpkg_data.get("pkg_help_description"):
            package_description = rpkg_data["pkg_help_description"]
        if rpkg_data.get("pkg_copyright"):
            package_copyright = "\nCopyright: {}".format(
                rpkg_data.get("pkg_copyright", "")
            )

    package_version = pkg_data.get("version", "0.0.1")

    # remove leading and trailing commas, add space after comma if missing
    if package_depends:
        package_depends = ", " + package_depends.strip(",").lstrip()
        package_depends = re.sub(r"(,(?![ ]))", ", ", package_depends)

    if package_imports:
        package_imports = package_imports.strip(",").lstrip()
        package_imports = re.sub(r"(,(?![ ]))", ", ", package_imports)

    if package_suggests:
        package_suggests = package_suggests.strip(",").lstrip()
        package_suggests = re.sub(r"(,(?![ ]))", ", ", package_suggests)

    if "bugs" in pkg_data:
        package_issues = pkg_data["bugs"].get("url", "")
    else:
        package_issues = ""
        print(
            "Warning: a URL for bug reports was "
            "not provided. Empty string inserted.",
            file=sys.stderr,
        )

    if "homepage" in pkg_data:
        package_url = pkg_data.get("homepage", "")
    else:
        package_url = ""
        print(
            "Warning: a homepage URL was not provided. Empty string inserted.",
            file=sys.stderr,
        )

    package_author = pkg_data.get("author")

    # Validate the author field *before* parsing it: splitting first used to
    # raise an IndexError on malformed fields, hiding this friendly error.
    if "<" not in package_author:
        print(
            "Error, aborting R package generation: "
            "R packages require a properly formatted author field "
            "or installation will fail. Please include an email "
            "address enclosed within < > brackets in package.json. ",
            file=sys.stderr,
        )
        sys.exit(1)

    package_author_name = package_author.split(" <")[0]
    package_author_email = package_author.split(" <")[1][:-1]

    package_author_fn = package_author_name.split(" ")[0]
    package_author_ln = package_author_name.rsplit(" ", 2)[-1]

    maintainer = pkg_data.get("maintainer", pkg_data.get("author"))

    if rpkg_data is not None:
        if rpkg_data.get("pkg_authors"):
            package_rauthors = "\nAuthors@R: {}".format(
                rpkg_data.get("pkg_authors", "")
            )
    else:
        package_rauthors = '\nAuthors@R: person("{}", "{}", role = c("aut", "cre"), email = "{}")'.format(
            package_author_fn, package_author_ln, package_author_email
        )

    if not (os.path.isfile("LICENSE") or os.path.isfile("LICENSE.txt")):
        package_license = pkg_data.get("license", "")
    else:
        package_license = pkg_data.get("license", "") + " + file LICENSE"
        # R requires that the LICENSE.txt file be named LICENSE
        if not os.path.isfile("LICENSE"):
            os.symlink("LICENSE.txt", "LICENSE")

    import_string = "# AUTO GENERATED FILE - DO NOT EDIT\n\n"
    packages_string = ""

    # materialize the filtered list: a lazy filter object is always truthy,
    # so the previous emptiness check was a no-op
    rpackage_list = [
        pkg
        for pkg in package_depends.split(", ") + package_imports.split(", ")
        if pkg
    ]

    for rpackage in rpackage_list:
        packages_string += "\nimport({})\n".format(rpackage)

    if os.path.exists("vignettes"):
        vignette_builder = "\nVignetteBuilder: knitr"
        if "knitr" not in package_suggests and "rmarkdown" not in package_suggests:
            package_suggests += ", knitr, rmarkdown"
            package_suggests = package_suggests.lstrip(", ")
    else:
        vignette_builder = ""

    pkghelp_stub_path = os.path.join("man", package_name + "-package.Rd")

    # generate the internal (not exported to the user) functions which
    # supply the JavaScript dependencies to the dash package.
    # this avoids having to generate an RData file from within Python.
    write_js_metadata(pkg_data, project_shortname, has_wildcards)

    with open("NAMESPACE", "w+", encoding="utf-8") as f:
        f.write(import_string)
        f.write(export_string)
        f.write(packages_string)

    with open(".Rbuildignore", "w+", encoding="utf-8") as f2:
        f2.write(rbuild_ignore_string)

    description_string = description_template.format(
        package_name=package_name,
        package_title=package_title,
        package_description=package_description,
        package_version=package_version,
        package_rauthors=package_rauthors,
        package_depends=package_depends,
        package_imports=package_imports,
        package_suggests=package_suggests,
        package_license=package_license,
        package_copyright=package_copyright,
        package_url=package_url,
        package_issues=package_issues,
        vignette_builder=vignette_builder,
    )

    with open("DESCRIPTION", "w+", encoding="utf-8") as f3:
        f3.write(description_string)

    if rpkg_data is not None and rpkg_data.get("pkg_help_description"):
        pkghelp = pkghelp_stub.format(
            package_name=package_name,
            pkg_help_title=rpkg_data.get("pkg_help_title"),
            pkg_help_description=rpkg_data.get("pkg_help_description"),
            lib_name=lib_name,
            maintainer=maintainer,
        )
        with open(pkghelp_stub_path, "w", encoding="utf-8") as f4:
            f4.write(pkghelp)
+
+
def snake_case_to_camel_case(namestring):
    """Convert snake_case to camelCase (the first word is left untouched).

    R package names need not be camelCase, but it is the more conventional
    presentation.
    """
    head, *rest = namestring.split("_")
    return head + "".join(word.capitalize() for word in rest)
+
+
# this logic will permit passing blank R prefixes to
# dash-generate-components, while also enforcing
# camelCase for the resulting functions; if a prefix
# is supplied, leave it as-is
def format_fn_name(prefix, name):
    if not prefix:
        # no prefix: lowercase the leading character before camelCasing
        return snake_case_to_camel_case(name[0].lower() + name[1:])
    return prefix + snake_case_to_camel_case(name)
+
+
# pylint: disable=unused-argument
def generate_exports(
    project_shortname,
    components,
    metadata,
    pkg_data,
    rpkg_data,
    prefix,
    package_depends,
    package_imports,
    package_suggests,
    **kwargs
):
    """Entry point: compute NAMESPACE exports and emit the full R package."""
    export_string = make_namespace_exports(components, prefix)

    # a single wildcard prop anywhere in the metadata means the package
    # must bundle the wildcard validation helper
    has_wildcards = any(
        any(key.endswith("-*") for key in component_data["props"])
        for component_data in metadata.values()
    )

    # now, bundle up the package information and create all the requisite
    # elements of an R package, so that the end result is installable either
    # locally or directly from GitHub
    generate_rpkg(
        pkg_data,
        rpkg_data,
        project_shortname,
        export_string,
        package_depends,
        package_imports,
        package_suggests,
        has_wildcards,
    )
+
+
def make_namespace_exports(components, prefix):
    """Build the NAMESPACE export() directives: one per generated component
    plus one per public hand-written function found in the R/ subdirectory."""
    export_string = ""
    for component in components:
        if (
            not component.endswith("-*")
            and str(component) not in r_keywords
            and str(component) not in ["setProps", "children"]
        ):
            export_string += "export({}{})\n".format(prefix, component)

    # the following lines enable rudimentary support for bundling in
    # R functions that are not automatically generated by the transpiler
    # such that functions contained in the R subdirectory are exported,
    # so long as they are not in utils.R.
    rfilelist = []
    omitlist = ["utils.R", "internal.R"] + [
        "{}{}.R".format(prefix, component) for component in components
    ]
    fnlist = []

    for script in os.listdir("R"):
        if script.endswith(".R") and script not in omitlist:
            rfilelist += [os.path.join("R", script)]

    # crude R parser: strip comments/strings/blocks, then scan what remains
    # for top-level function assignments to export
    for rfile in rfilelist:
        with open(rfile, "r", encoding="utf-8") as script:
            s = script.read()

            # remove comments
            s = re.sub("#.*$", "", s, flags=re.M)

            # put the whole file on one line
            s = s.replace("\n", " ").replace("\r", " ")

            # empty out strings, in case of unmatched block terminators
            s = re.sub(r"'([^'\\]|\\'|\\[^'])*'", "''", s)
            s = re.sub(r'"([^"\\]|\\"|\\[^"])*"', '""', s)

            # empty out block terminators () and {}
            # so we don't catch nested functions, or functions as arguments
            # repeat until it stops changing, in case of multiply nested blocks
            prev_len = len(s) + 1
            while len(s) < prev_len:
                prev_len = len(s)
                s = re.sub(r"\(([^()]|\(\))*\)", "()", s)
                s = re.sub(r"\{([^{}]|\{\})*\}", "{}", s)

            # now, in whatever is left, look for functions
            matches = re.findall(
                # in R, either = or <- may be used to create and assign objects
                r"([^A-Za-z0-9._]|^)([A-Za-z0-9._]+)\s*(=|<-)\s*function",
                s,
            )
            for match in matches:
                fn = match[1]
                # Allow users to mark functions as private by prefixing with .
                if fn[0] != "." and fn not in fnlist:
                    fnlist.append(fn)

    export_string += "\n".join("export({})".format(function) for function in fnlist)
    return export_string
+
+
def get_r_prop_types(type_object):
    """Mapping from the PropTypes js type object to the R type.

    Returns a dict of type-name -> zero-argument callable; callables are
    lazy so that only the matching entry ever inspects type_object["value"].
    """

    def shape_or_exact():
        # shape/exact list their member names, then recurse into each
        # member's own docstring one indent level deeper
        return "lists containing elements {}.\n{}".format(
            ", ".join("'{}'".format(t) for t in type_object["value"]),
            "Those elements have the following types:\n{}".format(
                "\n".join(
                    create_prop_docstring_r(
                        prop_name=prop_name,
                        type_object=prop,
                        required=prop["required"],
                        description=prop.get("description", ""),
                        indent_num=1,
                    )
                    for prop_name, prop in type_object["value"].items()
                )
            ),
        )

    return dict(
        array=lambda: "unnamed list",
        bool=lambda: "logical",
        number=lambda: "numeric",
        string=lambda: "character",
        object=lambda: "named list",
        any=lambda: "logical | numeric | character | named list | unnamed list",
        element=lambda: "dash component",
        node=lambda: "a list of or a singular dash component, string or number",
        # React's PropTypes.oneOf
        enum=lambda: "a value equal to: {}".format(
            ", ".join("{}".format(str(t["value"])) for t in type_object["value"])
        ),
        # React's PropTypes.oneOfType
        union=lambda: "{}".format(
            " | ".join(
                "{}".format(get_r_type(subType))
                for subType in type_object["value"]
                if get_r_type(subType) != ""
            )
        ),
        # React's PropTypes.arrayOf
        arrayOf=lambda: (
            "list"
            + (
                " of {}s".format(get_r_type(type_object["value"]))
                if get_r_type(type_object["value"]) != ""
                else ""
            )
        ),
        # React's PropTypes.objectOf
        objectOf=lambda: "list with named elements and values of type {}".format(
            get_r_type(type_object["value"])
        ),
        # React's PropTypes.shape
        shape=shape_or_exact,
        # React's PropTypes.exact
        exact=shape_or_exact,
    )
+
+
def get_r_type(type_object, is_flow_type=False, indent_num=0):
    """
    Convert JS types to R types for the component definition

    Parameters
    ----------
    type_object: dict
        react-docgen-generated prop type dictionary
    is_flow_type: bool
        unused here; accepted for signature parity with callers
    indent_num: int
        unused here; accepted for signature parity with callers

    Returns
    -------
    str
        R type string -- "" when the type is computed, a function,
        or has no R translation (the docstring previously claimed a
        "Python type string", which was wrong)
    """
    # computed values and function props have no sensible R representation;
    # explicit short-circuit replaces the precedence-dependent and/or chain
    if type_object.get("computed") or type_object.get("type", "") == "function":
        return ""

    js_type_name = type_object["name"]
    js_to_r_types = get_r_prop_types(type_object=type_object)
    if js_type_name in js_to_r_types:
        return js_to_r_types[js_type_name]()
    return ""
+
+
def print_r_type(typedata):
    """Capitalized R type description followed by '. ', or '' when untyped."""
    label = get_r_type(typedata).capitalize()
    return label + ". " if label else label
+
+
# pylint: disable=too-many-arguments
def create_prop_docstring_r(
    prop_name, type_object, required, description, indent_num, is_flow_type=False
):
    """
    Create the Dash component prop docstring
    Parameters
    ----------
    prop_name: str
        Name of the Dash component prop
    type_object: dict
        react-docgen-generated prop type dictionary
    required: bool
        Component is required?
    description: str
        Dash component description
    indent_num: int
        Number of indents to use for the context block
        (creates 2 spaces for every indent)
    is_flow_type: bool
        Does the prop use Flow types? Otherwise, uses PropTypes
    Returns
    -------
    str
        Dash component prop docstring
    """
    r_type_name = get_r_type(
        type_object=type_object, is_flow_type=is_flow_type, indent_num=indent_num + 1
    )

    # NOTE(review): docstring above says two spaces per indent level --
    # confirm the multiplier string below matches that intent.
    indent_spacing = "  " * indent_num
    # multi-line types (shape/exact) get the long "has the following type"
    # form; simple types get the compact single-line form
    if "\n" in r_type_name:
        return (
            "{indent_spacing}- {name} ({is_required}): {description}. "
            "{name} has the following type: {type}".format(
                indent_spacing=indent_spacing,
                name=prop_name,
                type=r_type_name,
                description=description,
                is_required="required" if required else "optional",
            )
        )
    return "{indent_spacing}- {name} ({type}{is_required}){description}".format(
        indent_spacing=indent_spacing,
        name=prop_name,
        type="{}; ".format(r_type_name) if r_type_name else "",
        description=(": {}".format(description) if description != "" else ""),
        is_required="required" if required else "optional",
    )
+
+
def get_wildcards_r(prop_keys):
    """Comma-separated, quoted wildcard prop names, or 'NULL' when none."""
    quoted = ["'{}'".format(key) for key in prop_keys if key.endswith("-*")]
    return ", ".join(quoted) if quoted else "NULL"
diff --git a/venv/lib/python3.8/site-packages/dash/development/base_component.py b/venv/lib/python3.8/site-packages/dash/development/base_component.py
new file mode 100644
index 0000000..975acfd
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/base_component.py
@@ -0,0 +1,481 @@
+import abc
+import collections
+import inspect
+import sys
+import typing
+import uuid
+import random
+import warnings
+import textwrap
+
+from .._utils import patch_collections_abc, stringify_id, OrderedSet
+
# List-like ABC used throughout to duck-type `children` containers,
# resolved via dash's collections-abc compat helper.
MutableSequence = patch_collections_abc("MutableSequence")

# Seeded RNG: auto-generated component IDs (_set_random_id) are
# deterministic within a process run.
rd = random.Random(0)
+
# Deprecation messages keyed by namespace, then component name.  When a
# component listed here is constructed, Component._validate_deprecation
# emits the message as a DeprecationWarning.
_deprecated_components = {
    "dash_core_components": {
        "LogoutButton": textwrap.dedent(
            """
            The Logout Button is no longer used with Dash Enterprise and can be replaced with a html.Button or html.A.
            eg: html.A(href=os.getenv('DASH_LOGOUT_URL'))
            """
        )
    }
}
+
+
# pylint: disable=no-init,too-few-public-methods
class ComponentRegistry:
    """Holds a registry of the namespaces used by components."""

    registry = OrderedSet()
    children_props = collections.defaultdict(dict)
    namespace_to_package = {}

    @classmethod
    def get_resources(cls, resource_name, includes=None):
        """Collect the `resource_name` attribute from every registered
        module (or only those named in `includes`, when given) into a
        single flat list."""
        collected = []

        for registered_name in cls.registry:
            if includes is not None and registered_name not in includes:
                continue
            registered_module = sys.modules[registered_name]
            collected.extend(getattr(registered_module, resource_name, []))

        return collected
+
+
class ComponentMeta(abc.ABCMeta):
    """Metaclass for all Dash components.

    At class-creation time it records the component's package and namespace
    in ComponentRegistry (so resources can be collected later) along with
    the component's `_children_props` metadata.
    """

    # pylint: disable=arguments-differ
    def __new__(mcs, name, bases, attributes):
        # Top-level package the component class is defined in.
        module = attributes["__module__"].split(".")[0]

        if attributes.get("_explicitize_dash_init", False):
            # We only want to patch the new generated component without
            # the `@_explicitize_args` decorator for mypy support
            # See issue: https://github.com/plotly/dash/issues/3226
            # Only for component that were generated by 3.0.3
            # Better to setattr on the component afterwards to ensure
            # backward compatibility.
            attributes["__init__"] = _explicitize_args(attributes["__init__"])

        _component = abc.ABCMeta.__new__(mcs, name, bases, attributes)

        if name == "Component" or module == "builtins":
            # Don't add to the registry the base component
            # and the components loaded dynamically by load_component
            # as it doesn't have the namespace.
            return _component

        _namespace = attributes.get("_namespace", module)
        ComponentRegistry.namespace_to_package[_namespace] = module
        ComponentRegistry.registry.add(module)
        ComponentRegistry.children_props[_namespace][name] = attributes.get(
            "_children_props"
        )

        return _component
+
+
def is_number(s):
    """Return True when `s` parses as a float, False on ValueError."""
    try:
        float(s)
    except ValueError:
        return False
    return True
+
+
def _check_if_has_indexable_children(item):
    """Raise KeyError unless `item.children` exists and is a Component or
    a list-like (tuple/MutableSequence) container."""
    children = getattr(item, "children", None)
    if isinstance(children, (Component, tuple, MutableSequence)):
        return
    raise KeyError
+
+
class Component(metaclass=ComponentMeta):
    """Abstract base class for every generated Dash component.

    Subclasses (generated from React prop metadata) define `_namespace`,
    `_type`, `_prop_names` and the wildcard attribute lists.  Instances
    validate their keyword props on construction, serialize to the
    renderer's JSON structure via `to_plotly_json`, and expose a
    mapping/iteration interface over the `children` tree keyed by
    component `id`.
    """

    _children_props = []
    _base_nodes = ["children"]
    _namespace: str
    _type: str
    _prop_names: typing.List[str]

    _valid_wildcard_attributes: typing.List[str]
    available_wildcard_properties: typing.List[str]

    # Sentinel meaning "prop was not supplied".
    class _UNDEFINED:
        def __repr__(self):
            return "undefined"

        def __str__(self):
            return "undefined"

    UNDEFINED = _UNDEFINED()

    # Sentinel default for props the caller must supply.
    class _REQUIRED:
        def __repr__(self):
            return "required"

        def __str__(self):
            return "required"

    REQUIRED = _REQUIRED()

    def __init__(self, **kwargs):
        """Validate the given props and store them as instance attributes.

        Raises TypeError for unknown props, for Component values on props
        other than `children`, and for malformed `id` values (`id` must be
        a string, or a dict with string keys and string/number/bool
        values).
        """
        self._validate_deprecation()
        import dash  # pylint: disable=import-outside-toplevel, cyclic-import

        for k, v in list(kwargs.items()):
            # pylint: disable=no-member
            k_in_propnames = k in self._prop_names
            k_in_wildcards = any(
                k.startswith(w) for w in self._valid_wildcard_attributes
            )
            # e.g. "The dash_core_components.Dropdown component (version 1.6.0)
            # with the ID "my-dropdown"
            id_suffix = f' with the ID "{kwargs["id"]}"' if "id" in kwargs else ""
            try:
                # Get fancy error strings that have the version numbers
                error_string_prefix = "The `{}.{}` component (version {}){}"
                # These components are part of dash now, so extract the dash version:
                dash_packages = {
                    "dash_html_components": "html",
                    "dash_core_components": "dcc",
                    "dash_table": "dash_table",
                }
                if self._namespace in dash_packages:
                    error_string_prefix = error_string_prefix.format(
                        dash_packages[self._namespace],
                        self._type,
                        dash.__version__,
                        id_suffix,
                    )
                else:
                    # Otherwise import the package and extract the version number
                    error_string_prefix = error_string_prefix.format(
                        self._namespace,
                        self._type,
                        getattr(__import__(self._namespace), "__version__", "unknown"),
                        id_suffix,
                    )
            except ImportError:
                # Our tests create mock components with libraries that
                # aren't importable
                error_string_prefix = f"The `{self._type}` component{id_suffix}"

            if not k_in_propnames and not k_in_wildcards:
                allowed_args = ", ".join(
                    sorted(self._prop_names)
                )  # pylint: disable=no-member
                raise TypeError(
                    f"{error_string_prefix} received an unexpected keyword argument: `{k}`"
                    f"\nAllowed arguments: {allowed_args}"
                )

            if k not in self._base_nodes and isinstance(v, Component):
                raise TypeError(
                    error_string_prefix
                    + " detected a Component for a prop other than `children`\n"
                    + f"Prop {k} has value {v!r}\n\n"
                    + "Did you forget to wrap multiple `children` in an array?\n"
                    + 'For example, it must be html.Div(["a", "b", "c"]) not html.Div("a", "b", "c")\n'
                )

            if k == "id":
                # Dict (pattern-matching) IDs get their keys/values checked.
                if isinstance(v, dict):
                    for id_key, id_val in v.items():
                        if not isinstance(id_key, str):
                            raise TypeError(
                                "dict id keys must be strings,\n"
                                + f"found {id_key!r} in id {v!r}"
                            )
                        if not isinstance(id_val, (str, int, float, bool)):
                            raise TypeError(
                                "dict id values must be strings, numbers or bools,\n"
                                + f"found {id_val!r} in id {v!r}"
                            )
                elif not isinstance(v, str):
                    raise TypeError(f"`id` prop must be a string or dict, not {v!r}")

            setattr(self, k, v)

    def _set_random_id(self):
        """Assign (and return) a deterministic pseudo-random UUID as this
        component's `id`, unless an `id` already exists.

        Refuses to run when `persistence` is set or `dash_snapshots` is
        loaded, since both features rely on stable component IDs.
        """
        if hasattr(self, "id"):
            return getattr(self, "id")

        kind = f"`{self._namespace}.{self._type}`"  # pylint: disable=no-member

        if getattr(self, "persistence", False):
            raise RuntimeError(
                f"""
                Attempting to use an auto-generated ID with the `persistence` prop.
                This is prohibited because persistence is tied to component IDs and
                auto-generated IDs can easily change.

                Please assign an explicit ID to this {kind} component.
                """
            )
        if "dash_snapshots" in sys.modules:
            raise RuntimeError(
                f"""
                Attempting to use an auto-generated ID in an app with `dash_snapshots`.
                This is prohibited because snapshots saves the whole app layout,
                including component IDs, and auto-generated IDs can easily change.
                Callbacks referencing the new IDs will not work with old snapshots.

                Please assign an explicit ID to this {kind} component.
                """
            )

        # `rd` is seeded at module load, so IDs are reproducible per process.
        v = str(uuid.UUID(int=rd.randint(0, 2**128)))
        setattr(self, "id", v)
        return v

    def to_plotly_json(self):
        """Serialize to the {"props", "type", "namespace"} dict structure
        consumed by the dash renderer."""
        # Add normal properties
        props = {
            p: getattr(self, p)
            for p in self._prop_names  # pylint: disable=no-member
            if hasattr(self, p)
        }
        # Add the wildcard properties data-* and aria-*
        props.update(
            {
                k: getattr(self, k)
                for k in self.__dict__
                if any(
                    k.startswith(w)
                    # pylint:disable=no-member
                    for w in self._valid_wildcard_attributes
                )
            }
        )
        as_json = {
            "props": props,
            "type": self._type,  # pylint: disable=no-member
            "namespace": self._namespace,  # pylint: disable=no-member
        }

        return as_json

    # pylint: disable=too-many-branches, too-many-return-statements
    # pylint: disable=redefined-builtin, inconsistent-return-statements
    def _get_set_or_delete(self, id, operation, new_item=None):
        """Depth-first search of the `children` tree for the component with
        the given `id`, then get/set/delete it depending on `operation`.
        Raises KeyError when no component with that id exists."""
        _check_if_has_indexable_children(self)

        # pylint: disable=access-member-before-definition,
        # pylint: disable=attribute-defined-outside-init
        if isinstance(self.children, Component):
            if getattr(self.children, "id", None) is not None:
                # Woohoo! It's the item that we're looking for
                if self.children.id == id:  # type: ignore[reportAttributeAccessIssue]
                    if operation == "get":
                        return self.children
                    if operation == "set":
                        self.children = new_item
                        return
                    if operation == "delete":
                        self.children = None
                        return

            # Recursively dig into its subtree
            try:
                if operation == "get":
                    return self.children.__getitem__(id)
                if operation == "set":
                    self.children.__setitem__(id, new_item)
                    return
                if operation == "delete":
                    self.children.__delitem__(id)
                    return
            except KeyError:
                pass

        # if children is like a list
        if isinstance(self.children, (tuple, MutableSequence)):
            for i, item in enumerate(self.children):  # type: ignore[reportOptionalIterable]
                # If the item itself is the one we're looking for
                if getattr(item, "id", None) == id:
                    if operation == "get":
                        return item
                    if operation == "set":
                        self.children[i] = new_item  # type: ignore[reportOptionalSubscript]
                        return
                    if operation == "delete":
                        del self.children[i]  # type: ignore[reportOptionalSubscript]
                        return

                # Otherwise, recursively dig into that item's subtree
                # Make sure it's not like a string
                elif isinstance(item, Component):
                    try:
                        if operation == "get":
                            return item.__getitem__(id)
                        if operation == "set":
                            item.__setitem__(id, new_item)
                            return
                        if operation == "delete":
                            item.__delitem__(id)
                            return
                    except KeyError:
                        pass

        # The end of our branch
        # If we were in a list, then this exception will get caught
        raise KeyError(id)

    # Magic methods for a mapping interface:
    # - __getitem__
    # - __setitem__
    # - __delitem__
    # - __iter__
    # - __len__

    def __getitem__(self, id):  # pylint: disable=redefined-builtin
        """Recursively find the element with the given ID through the tree of
        children."""

        # A component's children can be undefined, a string, another component,
        # or a list of components.
        return self._get_set_or_delete(id, "get")

    def __setitem__(self, id, item):  # pylint: disable=redefined-builtin
        """Set an element by its ID."""
        return self._get_set_or_delete(id, "set", item)

    def __delitem__(self, id):  # pylint: disable=redefined-builtin
        """Delete items by ID in the tree of children."""
        return self._get_set_or_delete(id, "delete")

    def _traverse(self):
        """Yield each item in the tree."""
        for t in self._traverse_with_paths():
            yield t[1]

    @staticmethod
    def _id_str(component):
        """Return " (id=...)" for a component with an id, else ""."""
        id_ = stringify_id(getattr(component, "id", ""))
        return id_ and f" (id={id_:s})"

    def _traverse_with_paths(self):
        """Yield each item with its path in the tree."""
        children = getattr(self, "children", None)
        children_type = type(children).__name__
        children_string = children_type + self._id_str(children)

        # children is just a component
        if isinstance(children, Component):
            yield "[*] " + children_string, children
            # pylint: disable=protected-access
            for p, t in children._traverse_with_paths():
                yield "\n".join(["[*] " + children_string, p]), t

        # children is a list of components
        elif isinstance(children, (tuple, MutableSequence)):
            for idx, i in enumerate(children):  # type: ignore[reportOptionalIterable]
                list_path = f"[{idx:d}] {type(i).__name__:s}{self._id_str(i)}"
                yield list_path, i

                if isinstance(i, Component):
                    # pylint: disable=protected-access
                    for p, t in i._traverse_with_paths():
                        yield "\n".join([list_path, p]), t

    def _traverse_ids(self):
        """Yield components with IDs in the tree of children."""
        for t in self._traverse():
            if isinstance(t, Component) and getattr(t, "id", None) is not None:
                yield t

    def __iter__(self):
        """Yield IDs in the tree of children."""
        for t in self._traverse_ids():
            yield t.id  # type: ignore[reportAttributeAccessIssue]

    def __len__(self):
        """Return the number of items in the tree."""
        # TODO - Should we return the number of items that have IDs
        # or just the number of items?
        # The number of items is more intuitive but returning the number
        # of IDs matches __iter__ better.
        length = 0
        if getattr(self, "children", None) is None:
            length = 0
        elif isinstance(self.children, Component):
            length = 1
            length += len(self.children)
        elif isinstance(self.children, (tuple, MutableSequence)):
            for c in self.children:  # type: ignore[reportOptionalIterable]
                length += 1
                if isinstance(c, Component):
                    length += len(c)
        else:
            # string or number
            length = 1
        return length

    def __repr__(self):
        """repr showing all set props, or just `children` when it is the
        only prop with a value."""
        # pylint: disable=no-member
        props_with_values = [
            c for c in self._prop_names if getattr(self, c, None) is not None
        ] + [
            c
            for c in self.__dict__
            if any(c.startswith(wc_attr) for wc_attr in self._valid_wildcard_attributes)
        ]
        if any(p != "children" for p in props_with_values):
            props_string = ", ".join(
                f"{p}={getattr(self, p)!r}" for p in props_with_values
            )
        else:
            props_string = repr(getattr(self, "children", None))
        return f"{self._type}({props_string})"

    def _validate_deprecation(self):
        """Emit a DeprecationWarning if this component appears in the
        module-level _deprecated_components table."""
        _type = getattr(self, "_type", "")
        _ns = getattr(self, "_namespace", "")
        deprecation_message = _deprecated_components.get(_ns, {}).get(_type)
        if deprecation_message:
            warnings.warn(DeprecationWarning(textwrap.dedent(deprecation_message)))
+
+
# Renderable node type: anything Dash accepts as a child — primitives,
# a Component, None, or a flat sequence of those.
ComponentType = typing.Union[
    str,
    int,
    float,
    Component,
    None,
    typing.Sequence[typing.Union[str, int, float, Component, None]],
]

# Generic type variable for component-template typing.
ComponentTemplate = typing.TypeVar("ComponentTemplate")
+
+
+# This wrapper adds an argument given to generated Component.__init__
+# with the actual given parameters by the user as a list of string.
+# This is then checked in the generated init to check if required
+# props were provided.
+def _explicitize_args(func):
+ varnames = func.__code__.co_varnames
+
+ def wrapper(*args, **kwargs):
+ if "_explicit_args" in kwargs:
+ raise Exception("Variable _explicit_args should not be set.")
+ kwargs["_explicit_args"] = list(
+ set(list(varnames[: len(args)]) + [k for k, _ in kwargs.items()])
+ )
+ if "self" in kwargs["_explicit_args"]:
+ kwargs["_explicit_args"].remove("self")
+ return func(*args, **kwargs)
+
+ new_sig = inspect.signature(wrapper).replace(
+ parameters=list(inspect.signature(func).parameters.values())
+ )
+ wrapper.__signature__ = new_sig # type: ignore[reportFunctionMemberAccess]
+ return wrapper
diff --git a/venv/lib/python3.8/site-packages/dash/development/build_process.py b/venv/lib/python3.8/site-packages/dash/development/build_process.py
new file mode 100644
index 0000000..be65553
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/build_process.py
@@ -0,0 +1,189 @@
+import os
+import sys
+import json
+import string
+import shutil
+import logging
+import coloredlogs
+import fire
+import requests
+
+from .._utils import run_command_with_process, compute_hash, job
+
logger = logging.getLogger(__name__)
# Colorized, timestamped console logging for the build jobs below.
coloredlogs.install(
    fmt="%(asctime)s,%(msecs)03d %(levelname)s - %(message)s", datefmt="%H:%M:%S"
)
+
+
class BuildProcess:
    """Drives the npm build for a dash JS bundle and collects its assets.

    `main` is the package root (must contain package.json); `deps_info` is
    an iterable of (scope, name, subfolder, filename, extras) npm
    dependency descriptors whose bundles are copied into a sibling `deps`
    folder.  Individual steps are exposed as `@job`s so they can be run
    separately; `build` runs the whole sequence.
    """

    def __init__(self, main, deps_info):
        self.logger = logger
        self.main = main
        self.build_folder = self._concat(self.main, "build")
        self.deps_info = deps_info
        self.npm_modules = self._concat(self.main, "node_modules")
        self.package_lock = self._concat(self.main, "package-lock.json")
        self.package = self._concat(self.main, "package.json")
        # Sets self.version/name/deps/deps_folder from package.json.
        self._parse_package(path=self.package)
        self.asset_paths = (self.deps_folder, self.npm_modules)

    def _parse_package(self, path):
        """Load name/version/dependencies from the JSON file at `path`
        (package.json or package-lock.json) onto self."""
        with open(path, "r", encoding="utf-8") as fp:
            package = json.load(fp)
        self.version = package["version"]
        self.name = package["name"]
        self.deps_folder = self._concat(self.main, os.pardir, "deps")
        self.deps = package["dependencies"]

    @staticmethod
    def _concat(*paths):
        """Join non-empty path segments and resolve to a real path."""
        return os.path.realpath(os.path.sep.join((path for path in paths if path)))

    @staticmethod
    def _clean_path(path):
        """Remove a file or directory if it exists; exit(1) on OSError."""
        if os.path.exists(path):
            logger.warning("🚨 %s already exists, remove it!", path)
            try:
                if os.path.isfile(path):
                    os.remove(path)
                if os.path.isdir(path):
                    shutil.rmtree(path)
            except OSError:
                sys.exit(1)
        else:
            logger.warning("🚨 %s doesn't exist, no action taken", path)

    @job("clean all the previous assets generated by build tool")
    def clean(self):
        for path in self.asset_paths:
            self._clean_path(path)

    @job("run `npm ci`")
    def npm(self):
        """Job to install npm packages."""
        os.chdir(self.main)
        run_command_with_process("npm ci")

    @job("build the renderer in dev mode")
    def watch(self):
        os.chdir(self.main)
        os.system("npm run build:dev")

    @job("run the whole building process in sequence")
    def build(self, build=None):
        self.clean()
        self.npm()
        self.bundles(build)
        self.digest()

    @job("compute the hash digest for assets")
    def digest(self):
        """Write digest.json mapping each .js/.map bundle to its SHA256."""
        if not os.path.exists(self.deps_folder):
            try:
                os.makedirs(self.deps_folder)
            except OSError:
                logger.exception("🚨 having issues manipulating %s", self.deps_folder)
                sys.exit(1)

        payload = {self.name: self.version}

        for folder in (self.deps_folder, self.build_folder):
            copies = tuple(
                _
                for _ in os.listdir(folder)
                if os.path.splitext(_)[-1] in {".js", ".map"}
            )
            logger.info("bundles in %s %s", folder, copies)

            for copy in copies:
                payload[f"SHA256 ({copy})"] = compute_hash(self._concat(folder, copy))

        with open(self._concat(self.main, "digest.json"), "w", encoding="utf-8") as fp:
            json.dump(payload, fp, sort_keys=True, indent=4, separators=(",", ":"))
        logger.info(
            "bundle digest in digest.json:\n%s",
            json.dumps(payload, sort_keys=True, indent=4),
        )

    @job("copy and generate the bundles")
    def bundles(self, build=None):  # pylint:disable=too-many-locals
        """Copy npm dependency bundles into `deps`, run the npm build, and
        render _dash_renderer.py from init.template with the collected
        version numbers."""
        if not os.path.exists(self.deps_folder):
            try:
                os.makedirs(self.deps_folder)
            except OSError:
                logger.exception("🚨 having issues manipulating %s", self.deps_folder)
                sys.exit(1)

        # Re-parse from the lockfile so self.deps holds pinned versions.
        self._parse_package(self.package_lock)

        # Subclass hook; no-op when not defined.
        getattr(self, "_bundles_extra", lambda: None)()

        versions = {
            "version": self.version,
            "package": self.name.replace(" ", "_").replace("-", "_"),
        }

        for scope, name, subfolder, filename, extras in self.deps_info:
            version = self.deps["/".join(filter(None, [scope, name]))]["version"]
            name_squashed = name.replace("-", "").replace(".", "")
            versions[name_squashed] = version

            logger.info("copy npm dependency => %s", filename)
            ext = "min.js" if "min" in filename.split(".") else "js"
            target = f"{name}@{version}.{ext}"

            shutil.copyfile(
                self._concat(self.npm_modules, scope, name, subfolder, filename),
                self._concat(self.deps_folder, target),
            )

            if extras:
                extras_str = '", "'.join(extras)
                versions[f"extra_{name_squashed}_versions"] = f'"{extras_str}"'

                # Download additional versions of this dependency from unpkg.
                for extra_version in extras:
                    # NOTE(review): the URL tail "(unknown)" looks garbled —
                    # presumably it should reference the bundle filename; verify.
                    url = f"https://unpkg.com/{name}@{extra_version}/umd/(unknown)"
                    res = requests.get(url)
                    extra_target = f"{name}@{extra_version}.{ext}"
                    extra_path = self._concat(self.deps_folder, extra_target)
                    with open(extra_path, "wb") as fp:
                        fp.write(res.content)

        _script = "build:dev" if build == "local" else "build:js"
        logger.info("run `npm run %s`", _script)
        os.chdir(self.main)
        run_command_with_process(f"npm run {_script}")

        logger.info("generate the `__init__.py` from template and versions")
        with open(self._concat(self.main, "init.template"), encoding="utf-8") as fp:
            t = string.Template(fp.read())

        renderer_init = self._concat(self.deps_folder, os.pardir, "_dash_renderer.py")
        with open(renderer_init, "w", encoding="utf-8") as fp:
            fp.write(t.safe_substitute(versions))
+
+
class Renderer(BuildProcess):
    """BuildProcess preconfigured for the dash-renderer package."""

    def __init__(self):
        """dash-renderer's path is binding with the dash folder hierarchy."""
        # Extra React/ReactDOM versions to ship alongside the pinned one.
        extras = [
            "18.2.0",
            "16.14.0",
        ]  # versions to include beyond what's in package.json
        super().__init__(
            self._concat(os.path.dirname(__file__), os.pardir, "dash-renderer"),
            # (npm scope, package name, subfolder, bundle filename, extras)
            (
                ("@babel", "polyfill", "dist", "polyfill.min.js", None),
                (None, "react", "umd", "react.production.min.js", extras),
                (None, "react", "umd", "react.development.js", extras),
                (None, "react-dom", "umd", "react-dom.production.min.js", extras),
                (None, "react-dom", "umd", "react-dom.development.js", extras),
                (None, "prop-types", None, "prop-types.min.js", None),
                (None, "prop-types", None, "prop-types.js", None),
            ),
        )
+
+
def renderer():
    """CLI entry point: expose the Renderer build jobs via python-fire."""
    fire.Fire(Renderer)
diff --git a/venv/lib/python3.8/site-packages/dash/development/component_generator.py b/venv/lib/python3.8/site-packages/dash/development/component_generator.py
new file mode 100644
index 0000000..276dbfb
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/component_generator.py
@@ -0,0 +1,296 @@
+from collections import OrderedDict
+
+import json
+import sys
+import subprocess
+import shlex
+import os
+import argparse
+import shutil
+import functools
+import pkg_resources
+import yaml
+
+from ._r_components_generation import write_class_file
+from ._r_components_generation import generate_exports
+from ._py_components_generation import generate_class_file
+from ._py_components_generation import generate_imports
+from ._py_components_generation import generate_classes_files
+from ._jl_components_generation import generate_struct_file
+from ._jl_components_generation import generate_module
+from ._generate_prop_types import generate_prop_types
+
# Prop-name patterns that would clash with Component attributes/methods;
# joined into regexes and passed to extract-meta.js so matching props are
# rejected during metadata extraction.
reserved_words = [
    "UNDEFINED",
    "REQUIRED",
    "to_plotly_json",
    "available_properties",
    "available_wildcard_properties",
    "_.*",
]
+
+
class _CombinedFormatter(
    argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
):
    """argparse formatter showing argument defaults while preserving the
    raw (unwrapped) description text."""

    pass
+
+
# pylint: disable=too-many-locals, too-many-arguments, too-many-branches, too-many-statements
def generate_components(
    components_source,
    project_shortname,
    package_info_filename="package.json",
    ignore="^_",
    rprefix=None,
    rdepends="",
    rimports="",
    rsuggests="",
    jlprefix=None,
    metadata=None,
    keep_prop_order=None,
    max_props=None,
    custom_typing_module=None,
):
    """Extract react-docgen metadata from `components_source` and generate
    component class files for Python (always), R (when `rprefix` is given)
    and Julia (when `jlprefix` is given) under `project_shortname`.

    When `metadata` is None it is produced by running the bundled
    extract-meta.js under node; exits the process if that yields no
    output.  Also writes metadata.json, the generated prop types, the
    package import scaffolding, and the R/Julia package exports.
    """

    project_shortname = project_shortname.replace("-", "_").rstrip("/\\")

    is_windows = sys.platform == "win32"

    extract_path = pkg_resources.resource_filename("dash", "extract-meta.js")

    # Anchor each reserved word so only exact matches are rejected.
    reserved_patterns = "|".join(f"^{p}$" for p in reserved_words)

    os.environ["NODE_PATH"] = "node_modules"

    shutil.copyfile(
        "package.json", os.path.join(project_shortname, package_info_filename)
    )

    if not metadata:
        env = os.environ.copy()

        # Ensure local node modules is used when the script is packaged.
        env["MODULES_PATH"] = os.path.abspath("./node_modules")

        cmd = shlex.split(
            f'node {extract_path} "{ignore}" "{reserved_patterns}" {components_source}',
            posix=not is_windows,
        )

        proc = subprocess.Popen(  # pylint: disable=consider-using-with
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=is_windows,
            env=env,
        )
        out, err = proc.communicate()
        status = proc.poll()

        if err:
            print(err.decode(), file=sys.stderr)

        if not out:
            print(
                f"Error generating metadata in {project_shortname} (status={status})",
                file=sys.stderr,
            )
            sys.exit(1)

        metadata = safe_json_loads(out.decode("utf-8"))

    py_generator_kwargs = {
        "custom_typing_module": custom_typing_module,
    }
    if keep_prop_order is not None:
        keep_prop_order = [
            component.strip(" ") for component in keep_prop_order.split(",")
        ]
        py_generator_kwargs["prop_reorder_exceptions"] = keep_prop_order

    if max_props:
        py_generator_kwargs["max_props"] = max_props

    # Python generation always runs; R/Julia generators are appended below.
    generator_methods = [functools.partial(generate_class_file, **py_generator_kwargs)]

    pkg_data = None
    if rprefix is not None or jlprefix is not None:
        with open("package.json", "r", encoding="utf-8") as f:
            pkg_data = safe_json_loads(f.read())

    rpkg_data = None
    if rprefix is not None:
        if not os.path.exists("man"):
            os.makedirs("man")
        if not os.path.exists("R"):
            os.makedirs("R")
        if os.path.isfile("dash-info.yaml"):
            with open("dash-info.yaml", encoding="utf-8") as yamldata:
                rpkg_data = yaml.safe_load(yamldata)
        generator_methods.append(
            functools.partial(write_class_file, prefix=rprefix, rpkg_data=rpkg_data)
        )

    if jlprefix is not None:
        generator_methods.append(
            functools.partial(generate_struct_file, prefix=jlprefix)
        )

    components = generate_classes_files(project_shortname, metadata, *generator_methods)

    generate_prop_types(
        metadata,
        project_shortname,
        custom_typing_module=custom_typing_module,
    )

    with open(
        os.path.join(project_shortname, "metadata.json"), "w", encoding="utf-8"
    ) as f:
        json.dump(metadata, f, separators=(",", ":"))

    generate_imports(project_shortname, components)

    if rprefix is not None:
        generate_exports(
            project_shortname,
            components,
            metadata,
            pkg_data,
            rpkg_data,
            rprefix,
            rdepends,
            rimports,
            rsuggests,
        )

    if jlprefix is not None:
        generate_module(project_shortname, components, metadata, pkg_data, jlprefix)
+
+
def safe_json_loads(s):
    """Parse JSON preserving key order; on Python 2 the result is
    recursively byte-encoded via `byteify`."""
    decoded = json.loads(s, object_pairs_hook=OrderedDict)
    if sys.version_info[0] < 3:
        return byteify(decoded)
    return decoded
+
+
def component_build_arg_parser():
    """Build the argparse parser for the dash-generate-components CLI.

    Returns
    -------
    argparse.ArgumentParser
        Parser whose namespace feeds `generate_components` (see `cli`).
    """
    parser = argparse.ArgumentParser(
        prog="dash-generate-components",
        formatter_class=_CombinedFormatter,
        description="Generate dash components by extracting the metadata "
        "using react-docgen. Then map the metadata to Python classes.",
    )
    parser.add_argument("components_source", help="React components source directory.")
    parser.add_argument(
        "project_shortname", help="Name of the project to export the classes files."
    )
    parser.add_argument(
        "-p",
        "--package-info-filename",
        default="package.json",
        help="The filename of the copied `package.json` to `project_shortname`",
    )
    parser.add_argument(
        "-i",
        "--ignore",
        default="^_",
        help="Files/directories matching the pattern will be ignored",
    )
    parser.add_argument(
        "--r-prefix",
        help="Specify a prefix for Dash for R component names, write "
        "components to R dir, create R package.",
    )
    parser.add_argument(
        "--r-depends",
        default="",
        help="Specify a comma-separated list of R packages to be "
        "inserted into the Depends field of the DESCRIPTION file.",
    )
    parser.add_argument(
        "--r-imports",
        default="",
        help="Specify a comma-separated list of R packages to be "
        "inserted into the Imports field of the DESCRIPTION file.",
    )
    parser.add_argument(
        "--r-suggests",
        default="",
        help="Specify a comma-separated list of R packages to be "
        "inserted into the Suggests field of the DESCRIPTION file.",
    )
    # Fixed: previous help text was copy-pasted from --r-prefix and
    # incorrectly described the R workflow.
    parser.add_argument(
        "--jl-prefix",
        help="Specify a prefix for Dash for Julia component names, write "
        "components to src dir, create Julia package.",
    )
    parser.add_argument(
        "-k",
        "--keep-prop-order",
        default=None,
        help="Specify a comma-separated list of components which will use the prop "
        "order described in the component proptypes instead of alphabetically reordered "
        "props. Pass the 'ALL' keyword to have every component retain "
        "its original prop order.",
    )
    parser.add_argument(
        "--max-props",
        type=int,
        default=250,
        help="Specify the max number of props to list in the component signature. "
        "More props will still be shown in the docstring, and will still work when "
        "provided as kwargs to the component. Python <3.7 only supports 255 args, "
        "but you may also want to reduce further for improved readability at the "
        "expense of auto-completion for the later props. Use 0 to include all props.",
    )
    # Fixed: stray leading space and grammar in the help text.
    parser.add_argument(
        "-t",
        "--custom-typing-module",
        type=str,
        default="dash_prop_typing",
        help="Module containing custom typing definitions for components. "
        "Can contain two variables:\n"
        " - custom_imports: dict[ComponentName, list[str]].\n"
        " - custom_props: dict[ComponentName, dict[PropName, function]].\n",
    )
    return parser
+
+
def cli():
    """dash-generate-components entry point: parse CLI args and run
    `generate_components`."""
    # Add current path for loading modules.
    sys.path.insert(0, ".")
    args = component_build_arg_parser().parse_args()
    generate_components(
        args.components_source,
        args.project_shortname,
        package_info_filename=args.package_info_filename,
        ignore=args.ignore,
        rprefix=args.r_prefix,
        rdepends=args.r_depends,
        rimports=args.r_imports,
        rsuggests=args.r_suggests,
        jlprefix=args.jl_prefix,
        keep_prop_order=args.keep_prop_order,
        max_props=args.max_props,
        custom_typing_module=args.custom_typing_module,
    )
+
+
# pylint: disable=undefined-variable
def byteify(input_object):
    """Recursively UTF-8-encode every str in a parsed-JSON structure
    (Python 2 compatibility helper)."""
    if isinstance(input_object, dict):
        return OrderedDict(
            (byteify(key), byteify(value))
            for key, value in input_object.items()
        )
    if isinstance(input_object, list):
        return [byteify(item) for item in input_object]
    if isinstance(input_object, str):  # noqa:F821
        return input_object.encode(encoding="utf-8")
    return input_object
+
+
+if __name__ == "__main__":
+ cli()
diff --git a/venv/lib/python3.8/site-packages/dash/development/update_components.py b/venv/lib/python3.8/site-packages/dash/development/update_components.py
new file mode 100644
index 0000000..2789be2
--- /dev/null
+++ b/venv/lib/python3.8/site-packages/dash/development/update_components.py
@@ -0,0 +1,179 @@
+import sys
+import subprocess
+import shlex
+import os
+import argparse
+import shutil
+import logging
+import coloredlogs
+
+
class _CombinedFormatter(
    argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
):
    """argparse help formatter that shows argument defaults while keeping
    the raw formatting of the description text."""

    pass
+
+
# Module-level logger for build/copy diagnostics.
logger = logging.getLogger(__name__)
# Install colored, timestamped output on the root logger.
coloredlogs.install(
    fmt="%(asctime)s,%(msecs)03d %(levelname)s - %(message)s", datefmt="%H:%M:%S"
)

# Maps a component package name to the directory it occupies inside the
# `dash` package; packages not listed here keep their own name.
dest_dir_map = {
    "dash-core-components": "dcc",
    "dash-html-components": "html",
    "dash-table": "dash_table",
}
+
+
def status_print(msg, **kwargs):
    """Print a status message, degrading gracefully on limited consoles.

    Some terminals (notably legacy Windows code pages) cannot encode the
    emoji used in the status messages; in that case the message is printed
    with non-ASCII characters dropped rather than crashing.

    ``**kwargs`` are forwarded to ``print`` (e.g. ``file=sys.stderr``).
    """
    try:
        print(msg, **kwargs)
    except UnicodeEncodeError:
        # Decode back to str so the fallback prints readable text instead
        # of the bytes repr (b'...') of the encoded message.
        print(msg.encode("ascii", "ignore").decode("ascii"), **kwargs)
+
+
def bootstrap_components(components_source, concurrency, install_type):
    """Install npm dependencies (``npm i`` or ``npm ci``) for the selected
    component packages via lerna, exiting the process with status 1 on
    failure."""

    on_windows = sys.platform == "win32"

    if components_source == "all":
        source_glob = "{dash-core-components,dash-html-components,dash-table}"
    else:
        source_glob = components_source

    cmdstr = f"npx lerna exec --concurrency {concurrency} --scope='{source_glob}' -- npm {install_type}"
    # Keep Windows-style token splitting when the command will go through
    # the Windows shell.
    cmd = shlex.split(cmdstr, posix=not on_windows)
    status_print(cmdstr)

    with subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=on_windows
    ) as proc:
        out, err = proc.communicate()
        status = proc.poll()

    if err:
        marker = "🛑 " if status else ""
        status_print(marker + err.decode(), file=sys.stderr)

    # Empty stdout is treated as failure too: a successful lerna/npm run
    # always produces output.
    if not status and out:
        status_print(
            f"🟢 Finished installing npm dependencies for component packages: {source_glob} 🟢",
            file=sys.stderr,
        )
    else:
        status_print(
            f"🚨 Failed installing npm dependencies for component packages: {source_glob} (status={status}) 🚨",
            file=sys.stderr,
        )
        sys.exit(1)
+
+
def build_components(components_source, concurrency):
    """Run ``npm run build`` for the selected component packages and copy
    the build artifacts into the ``dash`` package directory.

    :param components_source: glob of package names, or ``"all"`` for the
        three core component libraries.
    :param concurrency: maximum number of lerna subprocesses run in parallel.

    Exits the process with status 1 if the npm build fails or the build
    artifacts cannot be located or copied.
    """

    is_windows = sys.platform == "win32"

    source_glob = (
        components_source
        if components_source != "all"
        else "{dash-core-components,dash-html-components,dash-table}"
    )

    cmdstr = f"npx lerna exec --concurrency {concurrency} --scope='{source_glob}' -- npm run build"
    # Keep Windows-style token splitting when the command will go through
    # the Windows shell.
    cmd = shlex.split(cmdstr, posix=not is_windows)
    status_print(cmdstr)

    with subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=is_windows
    ) as proc:
        out, err = proc.communicate()
        status = proc.poll()

    if err:
        status_print(("🛑 " if status else "") + err.decode(), file=sys.stderr)

    # Empty stdout is treated as failure too: a successful lerna/npm run
    # always produces output. (Message fixed: this is the failure branch,
    # matching bootstrap_components' "Failed installing ..." wording.)
    if status or not out:
        status_print(
            f"🚨 Failed building component packages: {source_glob} (status={status}) 🚨",
            file=sys.stderr,
        )
        sys.exit(1)

    # Expand the brace glob into the individual package names.
    if "{" in source_glob:
        source_glob = source_glob.split("{")[1].split("}")[0]

    for package in source_glob.split(","):
        # npm build output lives at components/<pkg>/<pkg_with_underscores>/.
        build_directory = os.path.join(
            "components", package, package.replace("-", "_").rstrip("/\\")
        )

        # Some packages install under a short alias (e.g. dcc) inside dash/.
        dest_dir = dest_dir_map.get(package) or package

        dest_path = os.path.join("dash", dest_dir)

        if not os.path.exists(dest_path):
            try:
                os.makedirs(dest_path)
            except OSError:
                logger.exception("🚨 Having issues manipulating %s", dest_path)
                sys.exit(1)

        if not os.path.exists(build_directory):
            status_print(
                "🚨 Could not locate build artifacts."
                + " Check that the npm build process completed"
                + f" successfully for package: {package} 🚨"
            )
            sys.exit(1)
        else:
            status_print(f"🚚 Moving build artifacts from {build_directory} to Dash 🚚")
            # Replace the destination wholesale so stale artifacts are removed.
            shutil.rmtree(dest_path)
            shutil.copytree(build_directory, dest_path)
            # Keep the otherwise-generated directory tracked by git.
            with open(os.path.join(dest_path, ".gitkeep"), "w", encoding="utf-8"):
                pass
            status_print(
                f"🟢 Finished moving build artifacts from {build_directory} to Dash 🟢"
            )
+
+
def cli():
    """Parse command-line options, then install dependencies for and build
    the requested Dash component packages."""
    arg_parser = argparse.ArgumentParser(
        prog="dash-update-components",
        formatter_class=_CombinedFormatter,
        description="Update the specified subcomponent libraries within Dash"
        " by copying over build artifacts, dependencies, and dependency metadata.",
    )
    arg_parser.add_argument(
        "components_source",
        help="A glob string that matches the Dash component libraries to be updated"
        " (eg.'dash-table' // 'dash-core-components|dash-html-components' // 'all')."
        " The default argument is 'all'.",
        default="all",
    )
    arg_parser.add_argument(
        "--concurrency",
        type=int,
        default=3,
        help="Maximum concurrent steps, up to 3 (ie all components in parallel)",
    )
    arg_parser.add_argument(
        "--ci",
        help="For clean-install use '--ci True'",
        default="False",
    )

    opts = arg_parser.parse_args()

    if sys.platform == "win32":
        # Drop any shell quoting left in the glob on Windows.
        for quote_char in ('"', "'"):
            opts.components_source = opts.components_source.replace(quote_char, "")

    install_type = "ci" if opts.ci == "True" else "i"
    bootstrap_components(opts.components_source, opts.concurrency, install_type)
    build_components(opts.components_source, opts.concurrency)
+
+
# Allow running the updater directly as a script.
if __name__ == "__main__":
    cli()