| after_merge (string, 28–79.6k chars) | before_merge (string, 20–79.6k chars) | url (string, 38–71 chars) | full_traceback (string, 43–922k chars) | traceback_type (string, 555 classes) |
|---|---|---|---|---|
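Each row below pairs the patched function (`after_merge`) with the buggy original (`before_merge`), the GitHub issue `url`, the `full_traceback` the bug produced, and the exception class in `traceback_type`. As a minimal sketch of that schema (the dataset's name and loader are not given here, and the literal strings are abbreviated stand-ins for the first row rather than full cell contents), a single row can be represented and sanity-checked like this:

```python
# Minimal sketch: one row of this dump as a plain dict, mirroring the five
# string columns in the header above. Values are abbreviated stand-ins.
row = {
    "after_merge": "def plot(result_pickle_file_path, show, plot_save_file): ...",
    "before_merge": "def plot(result_dict_file, show, plot_save_file): ...",
    "url": "https://github.com/ricequant/rqalpha/issues/109",
    "full_traceback": (
        "Traceback (most recent call last):\n"
        "  ...\n"
        "TypeError: plot() got an unexpected keyword argument 'result_pickle_file_path'"
    ),
    "traceback_type": "TypeError",
}

# The exception class on the last traceback line should match `traceback_type`.
last_line = row["full_traceback"].strip().splitlines()[-1]
assert last_line.split(":", 1)[0].strip() == row["traceback_type"]
```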
def plot(result_pickle_file_path, show, plot_save_file):
"""
[sys_analyser] draw result DataFrame
"""
import pandas as pd
from .plot import plot_result
result_dict = pd.read_pickle(result_pickle_file_path)
plot_result(result_dict, show, plot_save_file)
|
def plot(result_dict_file, show, plot_save_file):
"""
[sys_analyser] draw result DataFrame
"""
import pandas as pd
from .plot import plot_result
result_dict = pd.read_pickle(result_dict_file)
plot_result(result_dict, show, plot_save_file)
|
https://github.com/ricequant/rqalpha/issues/109
|
Traceback (most recent call last):
File "c:\programdata\anaconda2\lib\runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "c:\programdata\anaconda2\lib\runpy.py", line 72, in _run_code
exec code in run_globals
│ └ {'__builtins__': <module '__builtin__' (built-in)>, '__file__': 'C:\ProgramData\Anaconda2\Scripts\rqalpha.exe\__main__.py',...
qalpha.exe\__main__.py", line 2>> at 0256EA40, file "C:\ProgramData\Anaconda2\Scripts
File "C:\ProgramData\Anaconda2\Scripts\rqalpha.exe\__main__.py", line 9, in <module>
sys.exit(entry_point())
│ └ <function entry_point at 0x047D1CF0>
└ <module 'sys' (built-in)>
File "c:\programdata\anaconda2\lib\site-packages\rqalpha\__main__.py", line 66, in entry_point
cli(obj={})
└ <click.core.Group object at 0x047CFE90>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 722, in __call__
return self.main(*args, **kwargs)
│ │ └ {'obj': {'VERBOSE': 0}}
│ └ ()
└ <click.core.Group object at 0x047CFE90>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 697, in main
rv = self.invoke(ctx)
│ └ <click.core.Context object at 0x0482CC10>
└ <click.core.Group object at 0x047CFE90>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 1066, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
│ │ └ <click.core.Context object at 0x0482CE50>
│ └ <click.core.Context object at 0x0482CE50>
└ <function _process_result at 0x0482D5B0>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 895, in invoke
return ctx.invoke(self.callback, **ctx.params)
│ │ └ <click.core.Context object at 0x0482CE50>
│ └ <click.core.Command object at 0x0482CF50>
└ <click.core.Context object at 0x0482CE50>
File "c:\programdata\anaconda2\lib\site-packages\click\core.py", line 535, in invoke
return callback(*args, **kwargs)
│ │ └ {'result_pickle_file_path': u'./1.pkl', 'plot_save_file': None, 'show': True}
│ └ ()
└ <function plot at 0x0482D830>
TypeError: plot() got an unexpected keyword argument 'result_pickle_file_path'
|
TypeError
|
def stream_logs(self):
"""Stream a pod's log."""
for line in self.api.read_namespaced_pod_log(
self.name, self.namespace, follow=True, _preload_content=False
):
# verify that the line is JSON
line = line.decode("utf-8")
try:
json.loads(line)
except ValueError:
# log event wasn't JSON.
# use the line itself as the message with unknown phase.
# We don't know what the right phase is, use 'unknown'.
# If it was a fatal error, presumably a 'failure'
# message will arrive shortly.
app_log.error("log event not json: %r", line)
line = json.dumps(
{
"phase": "unknown",
"message": line,
}
)
self.progress("log", line)
|
def stream_logs(self):
"""Stream a pod's log."""
for line in self.api.read_namespaced_pod_log(
self.name, self.namespace, follow=True, _preload_content=False
):
self.progress("log", line.decode("utf-8"))
|
https://github.com/jupyterhub/binderhub/issues/164
|
/ # jupyter-repo2docker https://github.com/yuvipanda/example-requirements --json-logs
Traceback (most recent call last):
File "/usr/local/bin/jupyter-repo2docker", line 11, in <module>
load_entry_point('jupyter-repo2docker==0.4.1', 'console_scripts', 'jupyter-repo2docker')()
File "/usr/local/lib/python3.6/site-packages/repo2docker/__main__.py", line 6, in main
f.start()
File "/usr/local/lib/python3.6/site-packages/repo2docker/app.py", line 309, in start
checkout_path
File "/usr/local/lib/python3.6/site-packages/repo2docker/app.py", line 95, in fetch
capture=self.json_logs):
File "/usr/local/lib/python3.6/site-packages/repo2docker/utils.py", line 12, in execute_cmd
proc = subprocess.Popen(cmd, **kwargs)
File "/usr/local/lib/python3.6/subprocess.py", line 709, in __init__
restore_signals, start_new_session)
File "/usr/local/lib/python3.6/subprocess.py", line 1344, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] No such file or directory: 'git': 'git'
|
FileNotFoundError
|
def addRecentProjectFile(self, projectFile):
projectFile = QUrl(projectFile).toLocalFile()
projects = self._recentProjectFiles()
# remove duplicates while preserving order
from collections import OrderedDict
uniqueProjects = OrderedDict.fromkeys(projects)
projects = list(uniqueProjects)
# remove previous usage of the value
if projectFile in uniqueProjects:
projects.remove(projectFile)
# add the new value in the first place
projects.insert(0, projectFile)
# keep only the 10 first elements
projects = projects[0:20]
settings = QSettings()
settings.beginGroup("RecentFiles")
size = settings.beginWriteArray("Projects")
for i, p in enumerate(projects):
settings.setArrayIndex(i)
settings.setValue("filepath", p)
settings.endArray()
settings.sync()
self.recentProjectFilesChanged.emit()
|
def addRecentProjectFile(self, projectFile):
projectFile = QUrl(projectFile).path()
projects = self._recentProjectFiles()
# remove duplicates while preserving order
from collections import OrderedDict
uniqueProjects = OrderedDict.fromkeys(projects)
projects = list(uniqueProjects)
# remove previous usage of the value
if projectFile in uniqueProjects:
projects.remove(projectFile)
# add the new value in the first place
projects.insert(0, projectFile)
# keep only the 10 first elements
projects = projects[0:20]
settings = QSettings()
settings.beginGroup("RecentFiles")
size = settings.beginWriteArray("Projects")
for i, p in enumerate(projects):
settings.setArrayIndex(i)
settings.setValue("filepath", p)
settings.endArray()
settings.sync()
self.recentProjectFilesChanged.emit()
|
https://github.com/alicevision/meshroom/issues/912
|
[2020-05-23 16:12:48,660][ERROR] Traceback (most recent call last):
File "D:\Meshroom_Src\meshroom\meshroom\ui\reconstruction.py", line 432, in load
super(Reconstruction, self).load(filepath, setupProjectFile)
File "D:\Meshroom_Src\meshroom\meshroom\ui\graph.py", line 314, in load
g.load(filepath, setupProjectFile)
File "D:\Meshroom_Src\meshroom\meshroom\core\graph.py", line 247, in load
with open(filepath) as jsonFile:
OSError: [Errno 22] Invalid argument: '/D:/Meshroom_Dev/test-project/mostree.mg'
|
OSError
|
def addSfmAugmentation(self, withMVS=False):
"""
Create a new augmentation step connected to the last SfM node of this Reconstruction and
return the created CameraInit and SfM nodes.
If the Reconstruction is not initialized (empty initial CameraInit), this method won't
create anything and return initial CameraInit and SfM nodes.
Args:
withMVS (bool): whether to create the MVS pipeline after the augmentation
Returns:
Node, Node: CameraInit, StructureFromMotion
"""
sfm = self.lastSfmNode()
if not sfm:
return None, None
if len(self._cameraInits) == 1:
assert self._cameraInit == self._cameraInits[0]
# Initial CameraInit is empty, use this one
if len(self._cameraInits[0].viewpoints) == 0:
return self._cameraInit, sfm
with self.groupedGraphModification("SfM Augmentation"):
sfm, mvs = multiview.sfmAugmentation(self, self.lastSfmNode(), withMVS=withMVS)
self.sfmAugmented.emit(sfm[0], mvs[-1] if mvs else sfm[-1])
return sfm[0], sfm[-1]
|
def addSfmAugmentation(self, withMVS=False):
"""
Create a new augmentation step connected to the last SfM node of this Reconstruction and
return the created CameraInit and SfM nodes.
If the Reconstruction is not initialized (empty initial CameraInit), this method won't
create anything and return initial CameraInit and SfM nodes.
Args:
withMVS (bool): whether to create the MVS pipeline after the augmentation
Returns:
Node, Node: CameraInit, StructureFromMotion
"""
sfm = self.lastSfmNode()
if not sfm:
return None, None
if len(self._cameraInits) == 1:
assert self._cameraInit == self._cameraInits[0]
# Initial CameraInit is empty, use this one
if len(self._cameraInits[0].viewpoints) == 0:
return self._cameraInit, sfm
with self.groupedGraphModification("SfM Augmentation"):
sfm, mvs = multiview.sfmAugmentation(self, self.lastSfmNode(), withMVS=withMVS)
self.sfmAugmented.emit(sfm[0], mvs[-1])
return sfm[0], sfm[-1]
|
https://github.com/alicevision/meshroom/issues/127
|
Traceback (most recent call last):
File "C:\Users\andre\work\meshroom\meshroom\ui\reconstruction.py", line 72, in start
raise RuntimeError("Invalid folder provided: {}".format(folder))
RuntimeError: Invalid folder provided: /F:/ai-ml-models/images/live
|
RuntimeError
|
def load_pymathics_doc(self):
if self.pymathics_doc_loaded:
return
from mathics.settings import default_pymathics_modules
pymathicspart = None
# Look for the "Pymathics Modules" part, and if it does not exist, create it.
for part in self.parts:
if part.title == "Pymathics Modules":
pymathicspart = part
if pymathicspart is None:
pymathicspart = DocPart(self, "Pymathics Modules", is_reference=True)
self.parts.append(pymathicspart)
# For each module, create the documentation object and load the chapters in the pymathics part.
for pymmodule in default_pymathics_modules:
pymathicsdoc = PyMathicsDocumentation(pymmodule)
for part in pymathicsdoc.parts:
for ch in part.chapters:
ch.title = f"{pymmodule} {part.title} {ch.title}"
ch.part = pymathicspart
pymathicspart.chapters_by_slug[ch.slug] = ch
pymathicspart.chapters.append(ch)
self.pymathics_doc_loaded = True
|
def load_pymathics_doc(self):
if self.pymathics_doc_loaded:
return
from mathics.settings import default_pymathics_modules
pymathicspart = None
# Look for the "Pymathics Modules" part, and if it does not exist, create it.
for part in self.parts:
if part.title == "Pymathics Modules":
pymathicspart = part
if pymathicspart is None:
pymathicspart = DocPart(self, "Pymathics Modules", is_reference=True)
self.parts.append(pymathicspart)
# For each module, create the documentation object and load the chapters in the pymathics part.
for pymmodule in default_pymathics_modules:
pymathicsdoc = PyMathicsDocumentation(pymmodule)
for part in pymathicsdoc.parts:
for ch in part.chapters:
ch.title = f"{pymmodule.name} {part.title} {ch.title}"
ch.part = pymathicspart
pymathicspart.chapters_by_slug[ch.slug] = ch
pymathicspart.chapters.append(ch)
self.pymathics_doc_loaded = True
|
https://github.com/mathics/Mathics/issues/906
|
$ mathicsserver
warning: database file /home/pablo/.local/var/mathics/mathics.sqlite not found
Migrating database /home/pablo/.local/var/mathics/mathics.sqlite
Traceback (most recent call last):
File "/home/pablo/Documents/Mathics/mathics/manage.py", line 13, in <module>
execute_from_command_line(sys.argv)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/__init__.py", line 381, in execute_from_command_line
utility.execute()
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/__init__.py", line 375, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 323, in run_from_argv
self.execute(*args, **cmd_options)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 361, in execute
self.check()
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 390, in check
include_deployment_checks=include_deployment_checks,
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/commands/migrate.py", line 65, in _run_checks
issues.extend(super()._run_checks(**kwargs))
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 377, in _run_checks
return checks.run_checks(**kwargs)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/registry.py", line 72, in run_checks
new_errors = check(app_configs=app_configs)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/urls.py", line 40, in check_url_namespaces_unique
all_namespaces = _load_all_namespaces(resolver)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/urls.py", line 57, in _load_all_namespaces
url_patterns = getattr(resolver, 'url_patterns', [])
File "/home/pablo/.local/lib/python3.6/site-packages/django/utils/functional.py", line 80, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/resolvers.py", line 571, in url_patterns
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
File "/home/pablo/.local/lib/python3.6/site-packages/django/utils/functional.py", line 80, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/resolvers.py", line 564, in urlconf_module
return import_module(self.urlconf_name)
File "/usr/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/pablo/Documents/Mathics/mathics/urls.py", line 14, in <module>
url(r'^', include('mathics.web.urls')),
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/conf.py", line 34, in include
urlconf_module = import_module(urlconf_module)
File "/usr/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/pablo/Documents/Mathics/mathics/web/urls.py", line 6, in <module>
from mathics.web.views import query, main_view, login, logout, save, open, get_worksheets, doc_search, doc_part, doc_chapter, doc_section, doc
File "/home/pablo/Documents/Mathics/mathics/web/views.py", line 28, in <module>
documentation.load_pymathics_doc()
File "/home/pablo/Documents/Mathics/mathics/doc/doc.py", line 727, in load_pymathics_doc
pymathicsdoc = PyMathicsDocumentation(pymmodule)
File "/home/pablo/Documents/Mathics/mathics/doc/doc.py", line 765, in __init__
self.name = self.pymathicsmodule.pymathics_version_data['name']
KeyError: 'name'
error: failed to create database
|
KeyError
|
def __init__(self, module=None):
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
self.doc_dir = None
self.xml_data_file = None
self.tex_data_file = None
self.latex_file = None
self.symbols = {}
if module is None:
return
import importlib
# Load the module and verify it is a pymathics module
try:
self.pymathicsmodule = importlib.import_module(module)
except ImportError:
print("Module does not exist")
mainfolder = ""
self.pymathicsmodule = None
self.parts = []
return
try:
mainfolder = self.pymathicsmodule.__path__[0]
self.name = self.pymathicsmodule.pymathics_version_data["name"]
self.version = self.pymathicsmodule.pymathics_version_data["version"]
self.author = self.pymathicsmodule.pymathics_version_data["author"]
except (AttributeError, KeyError, IndexError):
print(module + " is not a pymathics module.")
mainfolder = ""
self.pymathicsmodule = None
self.parts = []
return
# Paths
self.doc_dir = self.pymathicsmodule.__path__[0] + "/doc/"
self.xml_data_file = self.doc_dir + "xml/data"
self.tex_data_file = self.doc_dir + "tex/data"
self.latex_file = self.doc_dir + "tex/documentation.tex"
# Load the dictionary of mathics symbols defined in the module
self.symbols = {}
from mathics.builtin import is_builtin, Builtin
print("loading symbols")
for name in dir(self.pymathicsmodule):
var = getattr(self.pymathicsmodule, name)
if (
hasattr(var, "__module__")
and var.__module__ != "mathics.builtin.base"
and is_builtin(var)
and not name.startswith("_")
and var.__module__[: len(self.pymathicsmodule.__name__)]
== self.pymathicsmodule.__name__
): # nopep8
instance = var(expression=False)
if isinstance(instance, Builtin):
self.symbols[instance.get_name()] = instance
# Defines the default first part, in case we are building an independent documentation module.
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
try:
files = listdir(self.doc_dir)
files.sort()
except FileNotFoundError:
self.doc_dir = ""
self.xml_data_file = ""
self.tex_data_file = ""
self.latex_file = ""
files = []
appendix = []
for file in files:
part_title = file[2:]
if part_title.endswith(".mdoc"):
part_title = part_title[: -len(".mdoc")]
part = DocPart(self, part_title)
text = open(self.doc_dir + file, "rb").read().decode("utf8")
text = filter_comments(text)
chapters = CHAPTER_RE.findall(text)
for title, text in chapters:
chapter = DocChapter(part, title)
text += '<section title=""></section>'
sections = SECTION_RE.findall(text)
for pre_text, title, text in sections:
if not chapter.doc:
chapter.doc = Doc(pre_text)
if title:
section = DocSection(chapter, title, text)
chapter.sections.append(section)
part.chapters.append(chapter)
if file[0].isdigit():
self.parts.append(part)
else:
part.is_appendix = True
appendix.append(part)
# Builds the automatic documentation
builtin_part = DocPart(self, "Pymathics Modules", is_reference=True)
title, text = get_module_doc(self.pymathicsmodule)
chapter = DocChapter(builtin_part, title, Doc(text))
for name in self.symbols:
instance = self.symbols[name]
installed = True
for package in getattr(instance, "requires", []):
try:
importlib.import_module(package)
except ImportError:
installed = False
break
section = DocSection(
chapter,
strip_system_prefix(name),
instance.__doc__ or "",
operator=instance.get_operator(),
installed=installed,
)
chapter.sections.append(section)
builtin_part.chapters.append(chapter)
self.parts.append(builtin_part)
# Adds possible appendices
for part in appendix:
self.parts.append(part)
# set keys of tests
for tests in self.get_tests():
for test in tests.tests:
test.key = (tests.part, tests.chapter, tests.section, test.index)
|
def __init__(self, module=None):
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
self.doc_dir = None
self.xml_data_file = None
self.tex_data_file = None
self.latex_file = None
self.symbols = {}
if module is None:
return
import importlib
# Load the module and verify it is a pymathics module
try:
self.pymathicsmodule = importlib.import_module(module)
except ImportError:
print("Module does not exist")
mainfolder = ""
self.pymathicsmodule = None
self.parts = []
return
if hasattr(self.pymathicsmodule, "pymathics_version_data"):
mainfolder = self.pymathicsmodule.__path__[0]
self.name = self.pymathicsmodule.pymathics_version_data["name"]
self.version = self.pymathicsmodule.pymathics_version_data["version"]
self.author = self.pymathicsmodule.pymathics_version_data["author"]
else:
print(module + " is not a pymathics module.")
mainfolder = ""
self.pymathicsmodule = None
self.parts = []
return
# Paths
self.doc_dir = self.pymathicsmodule.__path__[0] + "/doc/"
self.xml_data_file = self.doc_dir + "xml/data"
self.tex_data_file = self.doc_dir + "tex/data"
self.latex_file = self.doc_dir + "tex/documentation.tex"
# Load the dictionary of mathics symbols defined in the module
self.symbols = {}
from mathics.builtin import is_builtin, Builtin
print("loading symbols")
for name in dir(self.pymathicsmodule):
var = getattr(self.pymathicsmodule, name)
if (
hasattr(var, "__module__")
and var.__module__ != "mathics.builtin.base"
and is_builtin(var)
and not name.startswith("_")
and var.__module__[: len(self.pymathicsmodule.__name__)]
== self.pymathicsmodule.__name__
): # nopep8
instance = var(expression=False)
if isinstance(instance, Builtin):
self.symbols[instance.get_name()] = instance
# Defines the default first part, in case we are building an independent documentation module.
self.title = "Overview"
self.parts = []
self.parts_by_slug = {}
try:
files = listdir(self.doc_dir)
files.sort()
except FileNotFoundError:
self.doc_dir = ""
self.xml_data_file = ""
self.tex_data_file = ""
self.latex_file = ""
files = []
appendix = []
for file in files:
part_title = file[2:]
if part_title.endswith(".mdoc"):
part_title = part_title[: -len(".mdoc")]
part = DocPart(self, part_title)
text = open(self.doc_dir + file, "rb").read().decode("utf8")
text = filter_comments(text)
chapters = CHAPTER_RE.findall(text)
for title, text in chapters:
chapter = DocChapter(part, title)
text += '<section title=""></section>'
sections = SECTION_RE.findall(text)
for pre_text, title, text in sections:
if not chapter.doc:
chapter.doc = Doc(pre_text)
if title:
section = DocSection(chapter, title, text)
chapter.sections.append(section)
part.chapters.append(chapter)
if file[0].isdigit():
self.parts.append(part)
else:
part.is_appendix = True
appendix.append(part)
# Builds the automatic documentation
builtin_part = DocPart(self, "Pymathics Modules", is_reference=True)
title, text = get_module_doc(self.pymathicsmodule)
chapter = DocChapter(builtin_part, title, Doc(text))
for name in self.symbols:
instance = self.symbols[name]
installed = True
for package in getattr(instance, "requires", []):
try:
importlib.import_module(package)
except ImportError:
installed = False
break
section = DocSection(
chapter,
strip_system_prefix(name),
instance.__doc__ or "",
operator=instance.get_operator(),
installed=installed,
)
chapter.sections.append(section)
builtin_part.chapters.append(chapter)
self.parts.append(builtin_part)
# Adds possible appendices
for part in appendix:
self.parts.append(part)
# set keys of tests
for tests in self.get_tests():
for test in tests.tests:
test.key = (tests.part, tests.chapter, tests.section, test.index)
|
https://github.com/mathics/Mathics/issues/906
|
$ mathicsserver
warning: database file /home/pablo/.local/var/mathics/mathics.sqlite not found
Migrating database /home/pablo/.local/var/mathics/mathics.sqlite
Traceback (most recent call last):
File "/home/pablo/Documents/Mathics/mathics/manage.py", line 13, in <module>
execute_from_command_line(sys.argv)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/__init__.py", line 381, in execute_from_command_line
utility.execute()
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/__init__.py", line 375, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 323, in run_from_argv
self.execute(*args, **cmd_options)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 361, in execute
self.check()
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 390, in check
include_deployment_checks=include_deployment_checks,
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/commands/migrate.py", line 65, in _run_checks
issues.extend(super()._run_checks(**kwargs))
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/management/base.py", line 377, in _run_checks
return checks.run_checks(**kwargs)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/registry.py", line 72, in run_checks
new_errors = check(app_configs=app_configs)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/urls.py", line 40, in check_url_namespaces_unique
all_namespaces = _load_all_namespaces(resolver)
File "/home/pablo/.local/lib/python3.6/site-packages/django/core/checks/urls.py", line 57, in _load_all_namespaces
url_patterns = getattr(resolver, 'url_patterns', [])
File "/home/pablo/.local/lib/python3.6/site-packages/django/utils/functional.py", line 80, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/resolvers.py", line 571, in url_patterns
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
File "/home/pablo/.local/lib/python3.6/site-packages/django/utils/functional.py", line 80, in __get__
res = instance.__dict__[self.name] = self.func(instance)
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/resolvers.py", line 564, in urlconf_module
return import_module(self.urlconf_name)
File "/usr/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/pablo/Documents/Mathics/mathics/urls.py", line 14, in <module>
url(r'^', include('mathics.web.urls')),
File "/home/pablo/.local/lib/python3.6/site-packages/django/urls/conf.py", line 34, in include
urlconf_module = import_module(urlconf_module)
File "/usr/lib/python3.6/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 994, in _gcd_import
File "<frozen importlib._bootstrap>", line 971, in _find_and_load
File "<frozen importlib._bootstrap>", line 955, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 665, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/home/pablo/Documents/Mathics/mathics/web/urls.py", line 6, in <module>
from mathics.web.views import query, main_view, login, logout, save, open, get_worksheets, doc_search, doc_part, doc_chapter, doc_section, doc
File "/home/pablo/Documents/Mathics/mathics/web/views.py", line 28, in <module>
documentation.load_pymathics_doc()
File "/home/pablo/Documents/Mathics/mathics/doc/doc.py", line 727, in load_pymathics_doc
pymathicsdoc = PyMathicsDocumentation(pymmodule)
File "/home/pablo/Documents/Mathics/mathics/doc/doc.py", line 765, in __init__
self.name = self.pymathicsmodule.pymathics_version_data['name']
KeyError: 'name'
error: failed to create database
|
KeyError
|
def clear_pymathics_modules(self):
from mathics.builtin import builtins, builtins_by_module
# Remove all modules that are not in mathics
# print("cleaning pymathics modules")
for key in list(builtins_by_module.keys()):
if not key.startswith("mathics."):
print(f'removing module "{key}" not in mathics.')
del builtins_by_module[key]
# print("reloading symbols from current builtins.")
for s in self.pymathics:
if s in self.builtin:
# If there was a true built-in definition for the symbol, restore it; else, remove the symbol.
if self.pymathics[s]:
self.builtin[s] = self.pymathics[s]
builtins[s] = None
for key, val in builtins_by_module.items():
for simb in val:
if simb.get_name() == s:
builtins[s] = simb
break
if builtins[s] is not None:
break
if builtins[s] is None:
builtins.__delitem__(s)
else:
self.builtin.__delitem__(s)
builtins.__delitem__(s)
self.pymathics = {}
# print("everything is clean")
return None
|
def clear_pymathics_modules(self):
from mathics.builtin import builtins, builtins_by_module
# Remove all modules that are not in mathics
# print("cleaning pymathics modules")
for key in list(builtins_by_module.keys()):
if key[:8] != "mathics.":
print("removing module ", key, " not in mathics.")
del builtins_by_module[key]
# print("reloading symbols from current builtins.")
for s in self.pymathics:
if s in self.builtin:
# If there was a true built-in definition for the symbol, restore it; else, remove the symbol.
if self.pymathics[s]:
self.builtin[s] = self.pymathics[s]
builtins[s] = None
for key, val in builtins_by_module:
for simb in val:
if simb.get_name() == s:
builtins[s] = simb
break
if builtins[s] is not None:
break
if builtins[s] is None:
builtins.__delitem__(s)
else:
self.builtin.__delitem__(s)
builtins.__delitem__(s)
self.pymathics = {}
# print("everything is clean")
return None
|
https://github.com/mathics/Mathics/issues/836
|
Mathics 1.1.dev0
on CPython 3.6.9 (default, Jul 17 2020, 12:50:27)
using SymPy 1.6.2, mpmath 1.1.0
Copyright (C) 2011-2020 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= a = 3
Out[1]= 3
In[2]:= Quit[]
removing module pymathics.natlang not in mathics.
In[1]:= LoadModule["pymathics.natlang"]
Out[1]= pymathics.natlang
In[2]:= Quit[]
removing module pymathics.natlang not in mathics.
Traceback (most recent call last):
File "/home/pablo/.local/bin/mathics", line 315, in <module>
main()
File "/home/pablo/.local/bin/mathics", line 298, in main
result = evaluation.evaluate(query, timeout=settings.TIMEOUT)
File "/home/pablo/Documents/Mathics/mathics/core/evaluation.py", line 286, in evaluate
result = run_with_timeout_and_stack(evaluate, timeout)
File "/home/pablo/Documents/Mathics/mathics/core/evaluation.py", line 95, in run_with_timeout_and_stack
return request()
File "/home/pablo/Documents/Mathics/mathics/core/evaluation.py", line 264, in evaluate
result = query.evaluate(self)
File "/home/pablo/Documents/Mathics/mathics/core/expression.py", line 853, in evaluate
expr, reevaluate = expr.evaluate_next(evaluation)
File "/home/pablo/Documents/Mathics/mathics/core/expression.py", line 975, in evaluate_next
result = rule.apply(new, evaluation, fully=False)
File "/home/pablo/Documents/Mathics/mathics/core/rules.py", line 63, in apply
yield_match, expression, {}, evaluation, fully=fully)
File "/home/pablo/Documents/Mathics/mathics/core/pattern.py", line 203, in match
yield_head, expression.get_head(), vars, evaluation)
File "/home/pablo/Documents/Mathics/mathics/core/pattern.py", line 132, in match
yield_func(vars, None)
File "/home/pablo/Documents/Mathics/mathics/core/pattern.py", line 198, in yield_head
yield_func(head_vars, None)
File "/home/pablo/Documents/Mathics/mathics/core/rules.py", line 39, in yield_match
new_expression = self.do_replace(expression, vars, options, evaluation)
File "/home/pablo/Documents/Mathics/mathics/core/rules.py", line 124, in do_replace
return self.function(evaluation=evaluation, **vars_noctx)
File "/home/pablo/Documents/Mathics/mathics/builtin/assignment.py", line 2205, in apply
evaluation.definitions.clear_pymathics_modules()
File "/home/pablo/Documents/Mathics/mathics/core/definitions.py", line 157, in clear_pymathics_modules
for key, val in builtins_by_module:
ValueError: too many values to unpack (expected 2)
|
ValueError
|
def apply(self, evaluation):
"Exit"
exit()
|
def apply(self, evaluation):
"Exit[]"
sys.exit()
|
https://github.com/mathics/Mathics/issues/813
|
Copyright (C) 2011-2016 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= Quit[]
Traceback (most recent call last):
File "~/Documents/Mathics/mathics/main.py", line 303, in <module>
main()
File "~/Documents/Mathics/mathics/main.py", line 286, in main
result = evaluation.evaluate(query, timeout=settings.TIMEOUT)
File "~/Documents/Mathics/mathics/core/evaluation.py", line 288, in evaluate
result = run_with_timeout_and_stack(evaluate, timeout)
File "~/Documents/Mathics/mathics/core/evaluation.py", line 95, in run_with_timeout_and_stack
return request()
File "~/Documents/Mathics/mathics/core/evaluation.py", line 265, in evaluate
result = query.evaluate(self)
File "~/Documents/Mathics/mathics/core/expression.py", line 853, in evaluate
expr, reevaluate = expr.evaluate_next(evaluation)
File "~/Documents/Mathics/mathics/core/expression.py", line 975, in evaluate_next
result = rule.apply(new, evaluation, fully=False)
File "~/Documents/Mathics/mathics/core/rules.py", line 63, in apply
yield_match, expression, {}, evaluation, fully=fully)
File "~/Documents/Mathics/mathics/core/pattern.py", line 203, in match
yield_head, expression.get_head(), vars, evaluation)
File "~/Documents/Mathics/mathics/core/pattern.py", line 132, in match
yield_func(vars, None)
File "~/Documents/Mathics/mathics/core/pattern.py", line 198, in yield_head
yield_func(head_vars, None)
File "~/Documents/Mathics/mathics/core/rules.py", line 39, in yield_match
new_expression = self.do_replace(expression, vars, options, evaluation)
File "~/Documents/Mathics/mathics/core/rules.py", line 124, in do_replace
return self.function(evaluation=evaluation, **vars_noctx)
File "~/Documents/Mathics/mathics/builtin/evaluation.py", line 465, in apply
sys.exit()
NameError: name 'sys' is not defined
|
NameError
|
def apply_n(self, n, evaluation):
"Exit[n_Integer]"
exit(n.get_int_value())
|
def apply_n(self, n, evaluation):
"Exit[n_Integer]"
sys.exit(n.get_int_value())
|
https://github.com/mathics/Mathics/issues/813
|
Copyright (C) 2011-2016 The Mathics Team.
This program comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions.
See the documentation for the full license.
Quit by pressing CONTROL-D
In[1]:= Quit[]
Traceback (most recent call last):
File "~/Documents/Mathics/mathics/main.py", line 303, in <module>
main()
File "~/Documents/Mathics/mathics/main.py", line 286, in main
result = evaluation.evaluate(query, timeout=settings.TIMEOUT)
File "~/Documents/Mathics/mathics/core/evaluation.py", line 288, in evaluate
result = run_with_timeout_and_stack(evaluate, timeout)
File "~/Documents/Mathics/mathics/core/evaluation.py", line 95, in run_with_timeout_and_stack
return request()
File "~/Documents/Mathics/mathics/core/evaluation.py", line 265, in evaluate
result = query.evaluate(self)
File "~/Documents/Mathics/mathics/core/expression.py", line 853, in evaluate
expr, reevaluate = expr.evaluate_next(evaluation)
File "~/Documents/Mathics/mathics/core/expression.py", line 975, in evaluate_next
result = rule.apply(new, evaluation, fully=False)
File "~/Documents/Mathics/mathics/core/rules.py", line 63, in apply
yield_match, expression, {}, evaluation, fully=fully)
File "~/Documents/Mathics/mathics/core/pattern.py", line 203, in match
yield_head, expression.get_head(), vars, evaluation)
File "~/Documents/Mathics/mathics/core/pattern.py", line 132, in match
yield_func(vars, None)
File "~/Documents/Mathics/mathics/core/pattern.py", line 198, in yield_head
yield_func(head_vars, None)
File "~/Documents/Mathics/mathics/core/rules.py", line 39, in yield_match
new_expression = self.do_replace(expression, vars, options, evaluation)
File "~/Documents/Mathics/mathics/core/rules.py", line 124, in do_replace
return self.function(evaluation=evaluation, **vars_noctx)
File "~/Documents/Mathics/mathics/builtin/evaluation.py", line 465, in apply
sys.exit()
NameError: name 'sys' is not defined
|
NameError
|
def apply(self, url, elements, evaluation):
"FetchURL[url_String, elements_]"
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix="")
try:
f = urllib2.urlopen(py_url)
try:
if sys.version_info >= (3, 0):
content_type = f.info().get_content_type()
else:
content_type = f.headers["content-type"]
os.write(temp_handle, f.read())
finally:
f.close()
def determine_filetype():
return mimetype_dict.get(content_type)
result = Import._import(temp_path, determine_filetype, elements, evaluation)
except HTTPError as e:
evaluation.message(
"FetchURL",
"httperr",
url,
"the server returned an HTTP status code of %s (%s)"
% (e.code, str(e.reason)),
)
return Symbol("$Failed")
except URLError as e: # see https://docs.python.org/3/howto/urllib2.html
if hasattr(e, "reason"):
evaluation.message("FetchURL", "httperr", url, str(e.reason))
elif hasattr(e, "code"):
evaluation.message(
"FetchURL", "httperr", url, "server returned %s" % e.code
)
return Symbol("$Failed")
except ValueError as e:
evaluation.message("FetchURL", "httperr", url, str(e))
return Symbol("$Failed")
finally:
os.unlink(temp_path)
return result
|
def apply(self, url, elements, evaluation):
"FetchURL[url_String, elements_]"
import tempfile
import os
py_url = url.get_string_value()
temp_handle, temp_path = tempfile.mkstemp(suffix="")
try:
with urllib2.urlopen(py_url) as f:
content_type = f.info().get_content_type()
os.write(temp_handle, f.read())
def determine_filetype():
return mimetype_dict.get(content_type)
result = Import._import(temp_path, determine_filetype, elements, evaluation)
except HTTPError as e:
evaluation.message(
"FetchURL",
"httperr",
url,
"the server returned an HTTP status code of %s (%s)"
% (e.code, str(e.reason)),
)
return Symbol("$Failed")
except URLError as e: # see https://docs.python.org/3/howto/urllib2.html
if hasattr(e, "reason"):
evaluation.message("FetchURL", "httperr", url, str(e.reason))
elif hasattr(e, "code"):
evaluation.message(
"FetchURL", "httperr", url, "server returned %s" % e.code
)
return Symbol("$Failed")
except ValueError as e:
evaluation.message("FetchURL", "httperr", url, str(e))
return Symbol("$Failed")
finally:
os.unlink(temp_path)
return result
|
https://github.com/mathics/Mathics/issues/562
|
In[1]:= Import["https://upload.wikimedia.org/wikipedia/en/2/24/Lenna.png"]
Traceback (most recent call last):
File "/home/angus/venv_pypy/bin/mathics", line 11, in <module>
load_entry_point('Mathics', 'console_scripts', 'mathics')()
File "/home/angus/Mathics/mathics/main.py", line 286, in main
result = evaluation.evaluate(query, timeout=settings.TIMEOUT)
File "/home/angus/Mathics/mathics/core/evaluation.py", line 257, in evaluate
result = run_with_timeout(evaluate, timeout)
File "/home/angus/Mathics/mathics/core/evaluation.py", line 76, in run_with_timeout
return request()
File "/home/angus/Mathics/mathics/core/evaluation.py", line 240, in evaluate
result = query.evaluate(self)
File "/home/angus/Mathics/mathics/core/expression.py", line 868, in evaluate
return result.evaluate(evaluation)
File "/home/angus/Mathics/mathics/core/expression.py", line 868, in evaluate
return result.evaluate(evaluation)
File "/home/angus/Mathics/mathics/core/expression.py", line 862, in evaluate
result = rule.apply(new, evaluation, fully=False)
File "/home/angus/Mathics/mathics/core/rules.py", line 73, in apply
yield_match, expression, {}, evaluation, fully=fully)
File "/home/angus/Mathics/mathics/core/pattern.py", line 206, in match
yield_head, expression.get_head(), vars, evaluation)
File "/home/angus/Mathics/mathics/core/pattern.py", line 135, in match
yield_func(vars, None)
File "/home/angus/Mathics/mathics/core/pattern.py", line 198, in yield_head
yield_choice, expression, attributes, head_vars)
File "/home/angus/Mathics/mathics/core/pattern.py", line 321, in get_pre_choices
yield_func(vars)
File "/home/angus/Mathics/mathics/core/pattern.py", line 187, in yield_choice
wrap_oneid=expression.get_head_name() != 'System`MakeBoxes')
File "/home/angus/Mathics/mathics/core/pattern.py", line 478, in match_leaf
include_flattened=include_flattened)
File "/home/angus/Mathics/mathics/core/pattern.py", line 342, in get_wrappings
yield_func(items[0])
File "/home/angus/Mathics/mathics/core/pattern.py", line 474, in yield_wrapping
leaf_count=leaf_count, wrap_oneid=wrap_oneid)
File "/home/angus/Mathics/mathics/builtin/patterns.py", line 768, in match
self.pattern.match(yield_func, expression, new_vars, evaluation)
File "/home/angus/Mathics/mathics/builtin/patterns.py", line 951, in match
yield_func(vars, None)
File "/home/angus/Mathics/mathics/core/pattern.py", line 466, in match_yield
leaf_count=leaf_count, wrap_oneid=wrap_oneid)
File "/home/angus/Mathics/mathics/core/pattern.py", line 478, in match_leaf
include_flattened=include_flattened)
File "/home/angus/Mathics/mathics/core/pattern.py", line 342, in get_wrappings
yield_func(items[0])
File "/home/angus/Mathics/mathics/core/pattern.py", line 474, in yield_wrapping
leaf_count=leaf_count, wrap_oneid=wrap_oneid)
File "/home/angus/Mathics/mathics/builtin/patterns.py", line 768, in match
self.pattern.match(yield_func, expression, new_vars, evaluation)
File "/home/angus/Mathics/mathics/builtin/patterns.py", line 953, in match
yield_func(vars, None)
File "/home/angus/Mathics/mathics/core/pattern.py", line 469, in match_yield
yield_func(new_vars, items_rest)
File "/home/angus/Mathics/mathics/core/pattern.py", line 458, in leaf_yield
(rest_expression[0] + items_rest[0], next_rest[1]))
File "/home/angus/Mathics/mathics/core/rules.py", line 39, in yield_match
new_expression = self.do_replace(vars, options, evaluation)
File "/home/angus/Mathics/mathics/core/rules.py", line 131, in do_replace
return self.function(evaluation=evaluation, **vars_noctx)
File "/home/angus/Mathics/mathics/builtin/importexport.py", line 393, in apply
with urllib2.urlopen(py_url) as f:
AttributeError: addinfourl instance has no attribute '__enter__'
|
AttributeError
|
def _get_system_stats(self):
with ConnectTo(StatisticDbViewer, self._config) as stats_db:
backend_data = stats_db.get_statistic("backend")
try:
return {
"backend_cpu_percentage": "{}%".format(
backend_data["system"]["cpu_percentage"]
),
"number_of_running_analyses": len(
backend_data["analysis"]["current_analyses"]
),
}
except KeyError:
return {"backend_cpu_percentage": "n/a", "number_of_running_analyses": "n/a"}
|
def _get_system_stats(self):
with ConnectTo(StatisticDbViewer, self._config) as stats_db:
backend_data = stats_db.get_statistic("backend")
return {
"backend_cpu_percentage": backend_data["system"]["cpu_percentage"],
"number_of_running_analyses": len(backend_data["analysis"]["current_analyses"]),
}
|
https://github.com/fkie-cad/FACT_core/issues/448
|
[2020-07-07 09:46:38,595] ERROR in app: Exception on /ajax/stats/system [GET]
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 2447, in wsgi_app
response = self.full_dispatch_request()
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1952, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/usr/local/lib/python3.8/dist-packages/flask_restful/__init__.py", line 272, in error_router
return original_handler(e)
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1821, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/usr/local/lib/python3.8/dist-packages/flask/_compat.py", line 39, in reraise
raise value
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1950, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/local/lib/python3.8/dist-packages/flask/app.py", line 1936, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "./web_interface/security/decorator.py", line 11, in decorated_view
return fn(*args, **kwargs)
File "./web_interface/components/ajax_routes.py", line 186, in _get_system_stats
'backend_cpu_percentage': backend_data['system']['cpu_percentage'],
KeyError: 'system'
|
KeyError
|
def _install_css_and_js_files():
with OperateInDirectory("../web_interface/static"):
os.makedirs("web_css", exist_ok=True)
os.makedirs("web_js", exist_ok=True)
wget_static_web_content(
"https://github.com/vakata/jstree/zipball/3.3.9",
".",
[
"unzip 3.3.9",
"rm 3.3.9",
"rm -rf ./web_js/jstree/vakata*",
"mv vakata* web_js/jstree",
],
"jstree",
)
wget_static_web_content(
"https://ajax.googleapis.com/ajax/libs/angularjs/1.4.8/angular.min.js",
".",
[],
"angularJS",
)
wget_static_web_content(
"https://github.com/chartjs/Chart.js/releases/download/v2.3.0/Chart.js",
".",
[],
"charts.js",
)
_build_highlight_js()
for css_url in [
"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css",
"https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.8.0/css/bootstrap-datepicker.standalone.css",
]:
wget_static_web_content(css_url, "web_css", [])
for js_url in [
"https://cdnjs.cloudflare.com/ajax/libs/jquery/1.12.1/jquery.min.js",
"https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js",
"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js",
"https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.8.0/js/bootstrap-datepicker.js",
"https://raw.githubusercontent.com/moment/moment/develop/moment.js",
]:
wget_static_web_content(js_url, "web_js", [])
if not Path("web_css/fontawesome").exists():
wget_static_web_content(
"https://use.fontawesome.com/releases/v5.13.0/fontawesome-free-5.13.0-web.zip",
".",
[
"unzip fontawesome-free-5.13.0-web.zip",
"rm fontawesome-free-5.13.0-web.zip",
"mv fontawesome-free-5.13.0-web web_css/fontawesome",
],
)
if not Path("bootstrap3-editable").exists():
wget_static_web_content(
"https://vitalets.github.io/x-editable/assets/zip/bootstrap3-editable-1.5.1.zip",
".",
[
"unzip -o bootstrap3-editable-1.5.1.zip",
"rm bootstrap3-editable-1.5.1.zip CHANGELOG.txt LICENSE-MIT README.md",
"rm -rf inputs-ext",
],
"x-editable",
)
|
def _install_css_and_js_files():
with OperateInDirectory("../web_interface/static"):
os.makedirs("web_css", exist_ok=True)
os.makedirs("web_js", exist_ok=True)
wget_static_web_content(
"https://github.com/vakata/jstree/zipball/3.3.9",
".",
["unzip 3.3.9", "rm 3.3.9", "mv vakata* web_js/jstree"],
"jstree",
)
wget_static_web_content(
"https://ajax.googleapis.com/ajax/libs/angularjs/1.4.8/angular.min.js",
".",
[],
"angularJS",
)
wget_static_web_content(
"https://github.com/chartjs/Chart.js/releases/download/v2.3.0/Chart.js",
".",
[],
"charts.js",
)
_build_highlight_js()
for css_url in [
"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css",
"https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.8.0/css/bootstrap-datepicker.standalone.css",
]:
wget_static_web_content(css_url, "web_css", [])
for js_url in [
"https://cdnjs.cloudflare.com/ajax/libs/jquery/1.12.1/jquery.min.js",
"https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js",
"https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js",
"https://cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.8.0/js/bootstrap-datepicker.js",
"https://raw.githubusercontent.com/moment/moment/develop/moment.js",
]:
wget_static_web_content(js_url, "web_js", [])
if not Path("web_css/fontawesome").exists():
wget_static_web_content(
"https://use.fontawesome.com/releases/v5.13.0/fontawesome-free-5.13.0-web.zip",
".",
[
"unzip fontawesome-free-5.13.0-web.zip",
"rm fontawesome-free-5.13.0-web.zip",
"mv fontawesome-free-5.13.0-web web_css/fontawesome",
],
)
if not Path("bootstrap3-editable").exists():
wget_static_web_content(
"https://vitalets.github.io/x-editable/assets/zip/bootstrap3-editable-1.5.1.zip",
".",
[
"unzip -o bootstrap3-editable-1.5.1.zip",
"rm bootstrap3-editable-1.5.1.zip CHANGELOG.txt LICENSE-MIT README.md",
"rm -rf inputs-ext",
],
"x-editable",
)
|
https://github.com/fkie-cad/FACT_core/issues/392
|
[2020-04-16 10:42:50][frontend][INFO]: Install static jstree content
Traceback (most recent call last):
File "src/install.py", line 173, in <module>
install()
File "src/install.py", line 157, in install
frontend(not args.no_radare, args.nginx)
File "/home/weidenba/FACT_core/src/install/frontend.py", line 165, in main
_install_css_and_js_files()
File "/home/weidenba/FACT_core/src/install/frontend.py", line 107, in _install_css_and_js_files
wget_static_web_content('https://github.com/vakata/jstree/zipball/3.3.9', '.', ['unzip 3.3.9', 'rm 3.3.9', 'mv vakata* web_js/jstree'], 'jstree')
File "/home/weidenba/FACT_core/src/install/frontend.py", line 34, in wget_static_web_content
raise InstallationError('Problem in processing resource at {}\n{}'.format(url, action_output))
helperFunctions.install.InstallationError: Problem in processing resource at https://github.com/vakata/jstree/zipball/3.3.9
mv: cannot move 'vakata-jstree-a7f2242' to 'web_js/jstree/vakata-jstree-a7f2242': Directory not empty
|
helperFunctions.install.InstallationError
|
def get_stats_pie(self, result, stats):
pie_invalid, pie_off, pie_on, pie_partial = self.extract_pie_data_from_analysis(
result
)
total_amount_of_files = self.calculate_total_files_for_pie(
[pie_off, pie_on, pie_partial, pie_invalid]
)
self.append_pie_stats_to_result_dict(
pie_invalid, pie_off, pie_on, pie_partial, stats, total_amount_of_files
)
|
def get_stats_pie(self, result, stats):
pie_invalid, pie_off, pie_on, pie_partial = self.extract_pie_data_from_analysis(
result
)
total_amount_of_files = self.calculate_total_files_for_pie(
pie_off, pie_on, pie_partial, pie_invalid
)
self.append_pie_stats_to_result_dict(
pie_invalid, pie_off, pie_on, pie_partial, stats, total_amount_of_files
)
|
https://github.com/fkie-cad/FACT_core/issues/88
|
[2018-03-28 13:02:04][update_statistic][INFO]: Try to start Mongo Server...
[2018-03-28 13:02:04][MongoMgr][INFO]: start local mongo database
Traceback (most recent call last):
File "src/update_statistic.py", line 48, in <module>
sys.exit(main())
File "src/update_statistic.py", line 38, in main
updater.update_all_stats()
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 45, in update_all_stats
self.db.update_statistic('exploit_mitigations', self._get_exploit_mitigations_stats())
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 102, in _get_exploit_mitigations_stats
self.get_stats_pie(result, stats)
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 175, in get_stats_pie
total_amount_of_files = self.calculate_total_files_for_pie(pie_off, pie_on, pie_partial, pie_invalid)
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 194, in calculate_total_files_for_pie
total_amount_of_files = pie_on[0][1] + pie_off[0][1] + pie_partial[0][1] + pie_invalid[0][1]
IndexError: list index out of range
|
IndexError
|
def calculate_total_files_for_pie(pie_stats):
total_amount_of_files = 0
for item in pie_stats:
with suppress(IndexError):
total_amount_of_files += item[0][1]
return total_amount_of_files
|
def calculate_total_files_for_pie(pie_off, pie_on, pie_partial, pie_invalid):
if (
len(pie_on) > 0
or len(pie_off) > 0
or len(pie_partial) > 0
or len(pie_invalid) > 0
):
total_amount_of_files = (
pie_on[0][1] + pie_off[0][1] + pie_partial[0][1] + pie_invalid[0][1]
)
else:
total_amount_of_files = 0
return total_amount_of_files
|
https://github.com/fkie-cad/FACT_core/issues/88
|
[2018-03-28 13:02:04][update_statistic][INFO]: Try to start Mongo Server...
[2018-03-28 13:02:04][MongoMgr][INFO]: start local mongo database
Traceback (most recent call last):
File "src/update_statistic.py", line 48, in <module>
sys.exit(main())
File "src/update_statistic.py", line 38, in main
updater.update_all_stats()
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 45, in update_all_stats
self.db.update_statistic('exploit_mitigations', self._get_exploit_mitigations_stats())
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 102, in _get_exploit_mitigations_stats
self.get_stats_pie(result, stats)
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 175, in get_stats_pie
total_amount_of_files = self.calculate_total_files_for_pie(pie_off, pie_on, pie_partial, pie_invalid)
File "/home/weidenba/git/FACT_core_github/src/statistic/update.py", line 194, in calculate_total_files_for_pie
total_amount_of_files = pie_on[0][1] + pie_off[0][1] + pie_partial[0][1] + pie_invalid[0][1]
IndexError: list index out of range
|
IndexError
|
def __init__(
self, X, sensitive_features, y, estimator, constraints, eps, B, opt_lambda=True
):
self.X = X
self.constraints = constraints
self.constraints.load_data(X, y, sensitive_features=sensitive_features)
self.obj = self.constraints.default_objective()
self.obj.load_data(X, y, sensitive_features=sensitive_features)
self.pickled_estimator = pickle.dumps(estimator)
self.eps = eps
self.B = B
self.opt_lambda = opt_lambda
self.hs = pd.Series(dtype="float64")
self.classifiers = pd.Series(dtype="float64")
self.errors = pd.Series(dtype="float64")
self.gammas = pd.DataFrame()
self.lambdas = pd.DataFrame()
self.n = self.X.shape[0]
self.n_oracle_calls = 0
self.n_oracle_calls_dummy_returned = 0
self.oracle_execution_times = []
self.last_linprog_n_hs = 0
self.last_linprog_result = None
|
def __init__(
self, X, sensitive_features, y, estimator, constraints, eps, B, opt_lambda=True
):
self.X = X
self.constraints = constraints
self.constraints.load_data(X, y, sensitive_features=sensitive_features)
self.obj = self.constraints.default_objective()
self.obj.load_data(X, y, sensitive_features=sensitive_features)
self.pickled_estimator = pickle.dumps(estimator)
self.eps = eps
self.B = B
self.opt_lambda = opt_lambda
self.hs = pd.Series(dtype="float64")
self.classifiers = pd.Series(dtype="float64")
self.errors = pd.Series(dtype="float64")
self.gammas = pd.DataFrame()
self.lambdas = pd.DataFrame()
self.n = self.X.shape[0]
self.n_oracle_calls = 0
self.oracle_execution_times = []
self.last_linprog_n_hs = 0
self.last_linprog_result = None
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def _call_oracle(self, lambda_vec):
signed_weights = self.obj.signed_weights() + self.constraints.signed_weights(
lambda_vec
)
redY = 1 * (signed_weights > 0)
redW = signed_weights.abs()
redW = self.n * redW / redW.sum()
redY_unique = np.unique(redY)
classifier = None
if len(redY_unique) == 1:
logger.debug("redY had single value. Using DummyClassifier")
classifier = DummyClassifier(strategy="constant", constant=redY_unique[0])
self.n_oracle_calls_dummy_returned += 1
else:
classifier = pickle.loads(self.pickled_estimator)
oracle_call_start_time = time()
classifier.fit(self.X, redY, sample_weight=redW)
self.oracle_execution_times.append(time() - oracle_call_start_time)
self.n_oracle_calls += 1
return classifier
|
def _call_oracle(self, lambda_vec):
signed_weights = self.obj.signed_weights() + self.constraints.signed_weights(
lambda_vec
)
redY = 1 * (signed_weights > 0)
redW = signed_weights.abs()
redW = self.n * redW / redW.sum()
classifier = pickle.loads(self.pickled_estimator)
oracle_call_start_time = time()
classifier.fit(self.X, redY, sample_weight=redW)
self.oracle_execution_times.append(time() - oracle_call_start_time)
self.n_oracle_calls += 1
return classifier
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def __init__(self, estimator, constraints, eps=0.01, T=50, nu=None, eta_mul=2.0): # noqa: D103
self._estimator = estimator
self._constraints = constraints
self._eps = eps
self._T = T
self._nu = nu
self._eta_mul = eta_mul
self._best_gap = None
self._predictors = None
self._weights = None
self._last_t = None
self._best_t = None
self._n_oracle_calls = 0
self._n_oracle_calls_dummy_returned = 0
self._oracle_execution_times = None
self._lambda_vecs = pd.DataFrame()
self._lambda_vecs_LP = pd.DataFrame()
self._lambda_vecs_lagrangian = pd.DataFrame()
|
def __init__(self, estimator, constraints, eps=0.01, T=50, nu=None, eta_mul=2.0): # noqa: D103
self._estimator = estimator
self._constraints = constraints
self._eps = eps
self._T = T
self._nu = nu
self._eta_mul = eta_mul
self._best_gap = None
self._predictors = None
self._weights = None
self._last_t = None
self._best_t = None
self._n_oracle_calls = 0
self._oracle_execution_times = None
self._lambda_vecs = pd.DataFrame()
self._lambda_vecs_LP = pd.DataFrame()
self._lambda_vecs_lagrangian = pd.DataFrame()
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def fit(self, X, y, **kwargs):
"""Return a fair classifier under specified fairness constraints.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
"""
_, y_train, sensitive_features = _validate_and_reformat_input(X, y, **kwargs)
n = y_train.shape[0]
logger.debug("...Exponentiated Gradient STARTING")
B = 1 / self._eps
lagrangian = _Lagrangian(
X, sensitive_features, y_train, self._estimator, self._constraints, self._eps, B
)
theta = pd.Series(0, lagrangian.constraints.index)
Qsum = pd.Series(dtype="float64")
gaps_EG = []
gaps = []
Qs = []
last_regret_checked = _REGRET_CHECK_START_T
last_gap = np.PINF
for t in range(0, self._T):
logger.debug("...iter=%03d", t)
# set lambdas for every constraint
lambda_vec = B * np.exp(theta) / (1 + np.exp(theta).sum())
self._lambda_vecs[t] = lambda_vec
lambda_EG = self._lambda_vecs.mean(axis=1)
# select classifier according to best_h method
h, h_idx = lagrangian.best_h(lambda_vec)
if t == 0:
if self._nu is None:
self._nu = _ACCURACY_MUL * (h(X) - y_train).abs().std() / np.sqrt(n)
eta_min = self._nu / (2 * B)
eta = self._eta_mul / B
logger.debug(
"...eps=%.3f, B=%.1f, nu=%.6f, T=%d, eta_min=%.6f",
self._eps,
B,
self._nu,
self._T,
eta_min,
)
if h_idx not in Qsum.index:
Qsum.at[h_idx] = 0.0
Qsum[h_idx] += 1.0
gamma = lagrangian.gammas[h_idx]
Q_EG = Qsum / Qsum.sum()
result_EG = lagrangian.eval_gap(Q_EG, lambda_EG, self._nu)
gap_EG = result_EG.gap()
gaps_EG.append(gap_EG)
if t == 0 or not _RUN_LP_STEP:
gap_LP = np.PINF
else:
# saddle point optimization over the convex hull of
# classifiers returned so far
Q_LP, self._lambda_vecs_LP[t], result_LP = lagrangian.solve_linprog(
self._nu
)
gap_LP = result_LP.gap()
# keep values from exponentiated gradient or linear programming
if gap_EG < gap_LP:
Qs.append(Q_EG)
gaps.append(gap_EG)
else:
Qs.append(Q_LP)
gaps.append(gap_LP)
logger.debug(
"%seta=%.6f, L_low=%.3f, L=%.3f, L_high=%.3f, gap=%.6f, disp=%.3f, "
"err=%.3f, gap_LP=%.6f",
_INDENTATION,
eta,
result_EG.L_low,
result_EG.L,
result_EG.L_high,
gap_EG,
result_EG.gamma.max(),
result_EG.error,
gap_LP,
)
if (gaps[t] < self._nu) and (t >= _MIN_T):
# solution found
break
# update regret
if t >= last_regret_checked * _REGRET_CHECK_INCREASE_T:
best_gap = min(gaps_EG)
if best_gap > last_gap * _SHRINK_REGRET:
eta *= _SHRINK_ETA
last_regret_checked = t
last_gap = best_gap
# update theta based on learning rate
theta += eta * (gamma - self._eps)
# retain relevant result data
gaps_series = pd.Series(gaps)
gaps_best = gaps_series[gaps_series <= gaps_series.min() + _PRECISION]
self._best_t = gaps_best.index[-1]
self._best_gap = gaps[self._best_t]
self._weights = Qs[self._best_t]
self._hs = lagrangian.hs
for h_idx in self._hs.index:
if h_idx not in self._weights.index:
self._weights.at[h_idx] = 0.0
self._last_t = len(Qs) - 1
self._predictors = lagrangian.classifiers
self._n_oracle_calls = lagrangian.n_oracle_calls
self._n_oracle_calls_dummy_returned = lagrangian.n_oracle_calls_dummy_returned
self._oracle_execution_times = lagrangian.oracle_execution_times
self._lambda_vecs_lagrangian = lagrangian.lambdas
logger.debug(
"...eps=%.3f, B=%.1f, nu=%.6f, T=%d, eta_min=%.6f",
self._eps,
B,
self._nu,
self._T,
eta_min,
)
logger.debug(
"...last_t=%d, best_t=%d, best_gap=%.6f, n_oracle_calls=%d, n_hs=%d",
self._last_t,
self._best_t,
self._best_gap,
lagrangian.n_oracle_calls,
len(lagrangian.classifiers),
)
|
def fit(self, X, y, **kwargs):
"""Return a fair classifier under specified fairness constraints.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
"""
_, y_train, sensitive_features = _validate_and_reformat_input(X, y, **kwargs)
n = y_train.shape[0]
logger.debug("...Exponentiated Gradient STARTING")
B = 1 / self._eps
lagrangian = _Lagrangian(
X, sensitive_features, y_train, self._estimator, self._constraints, self._eps, B
)
theta = pd.Series(0, lagrangian.constraints.index)
Qsum = pd.Series(dtype="float64")
gaps_EG = []
gaps = []
Qs = []
last_regret_checked = _REGRET_CHECK_START_T
last_gap = np.PINF
for t in range(0, self._T):
logger.debug("...iter=%03d", t)
# set lambdas for every constraint
lambda_vec = B * np.exp(theta) / (1 + np.exp(theta).sum())
self._lambda_vecs[t] = lambda_vec
lambda_EG = self._lambda_vecs.mean(axis=1)
# select classifier according to best_h method
h, h_idx = lagrangian.best_h(lambda_vec)
if t == 0:
if self._nu is None:
self._nu = _ACCURACY_MUL * (h(X) - y_train).abs().std() / np.sqrt(n)
eta_min = self._nu / (2 * B)
eta = self._eta_mul / B
logger.debug(
"...eps=%.3f, B=%.1f, nu=%.6f, T=%d, eta_min=%.6f",
self._eps,
B,
self._nu,
self._T,
eta_min,
)
if h_idx not in Qsum.index:
Qsum.at[h_idx] = 0.0
Qsum[h_idx] += 1.0
gamma = lagrangian.gammas[h_idx]
Q_EG = Qsum / Qsum.sum()
result_EG = lagrangian.eval_gap(Q_EG, lambda_EG, self._nu)
gap_EG = result_EG.gap()
gaps_EG.append(gap_EG)
if t == 0 or not _RUN_LP_STEP:
gap_LP = np.PINF
else:
# saddle point optimization over the convex hull of
# classifiers returned so far
Q_LP, self._lambda_vecs_LP[t], result_LP = lagrangian.solve_linprog(
self._nu
)
gap_LP = result_LP.gap()
# keep values from exponentiated gradient or linear programming
if gap_EG < gap_LP:
Qs.append(Q_EG)
gaps.append(gap_EG)
else:
Qs.append(Q_LP)
gaps.append(gap_LP)
logger.debug(
"%seta=%.6f, L_low=%.3f, L=%.3f, L_high=%.3f, gap=%.6f, disp=%.3f, "
"err=%.3f, gap_LP=%.6f",
_INDENTATION,
eta,
result_EG.L_low,
result_EG.L,
result_EG.L_high,
gap_EG,
result_EG.gamma.max(),
result_EG.error,
gap_LP,
)
if (gaps[t] < self._nu) and (t >= _MIN_T):
# solution found
break
# update regret
if t >= last_regret_checked * _REGRET_CHECK_INCREASE_T:
best_gap = min(gaps_EG)
if best_gap > last_gap * _SHRINK_REGRET:
eta *= _SHRINK_ETA
last_regret_checked = t
last_gap = best_gap
# update theta based on learning rate
theta += eta * (gamma - self._eps)
# retain relevant result data
gaps_series = pd.Series(gaps)
gaps_best = gaps_series[gaps_series <= gaps_series.min() + _PRECISION]
self._best_t = gaps_best.index[-1]
self._best_gap = gaps[self._best_t]
self._weights = Qs[self._best_t]
self._hs = lagrangian.hs
for h_idx in self._hs.index:
if h_idx not in self._weights.index:
self._weights.at[h_idx] = 0.0
self._last_t = len(Qs) - 1
self._predictors = lagrangian.classifiers
self._n_oracle_calls = lagrangian.n_oracle_calls
self._oracle_execution_times = lagrangian.oracle_execution_times
self._lambda_vecs_lagrangian = lagrangian.lambdas
logger.debug(
"...eps=%.3f, B=%.1f, nu=%.6f, T=%d, eta_min=%.6f",
self._eps,
B,
self._nu,
self._T,
eta_min,
)
logger.debug(
"...last_t=%d, best_t=%d, best_gap=%.6f, n_oracle_calls=%d, n_hs=%d",
self._last_t,
self._best_t,
self._best_gap,
lagrangian.n_oracle_calls,
len(lagrangian.classifiers),
)
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
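The fit loop above implements the exponentiated-gradient update lambda_vec = B * exp(theta) / (1 + sum(exp(theta))) followed by theta += eta * (gamma - eps). A small numeric sketch of a single step, assuming numpy and pandas; the constraint index names are illustrative:

import numpy as np
import pandas as pd

eps = 0.01
B = 1 / eps            # as in fit(): B = 1 / self._eps
eta = 2.0 / B          # eta_mul / B with the default eta_mul = 2.0

theta = pd.Series(0.0, index=["g0", "g1"])
lambda_vec = B * np.exp(theta) / (1 + np.exp(theta).sum())
print(lambda_vec)      # both entries ~33.3, so the vector stays bounded by B

gamma = pd.Series([0.05, 0.0], index=["g0", "g1"])  # per-constraint violations
theta += eta * (gamma - eps)                        # multiplicative-weights step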
def fit(self, X, y, **kwargs):
"""Run the grid search.
This will result in multiple copies of the
estimator being made, and the :code:`fit(X)` method
of each one called.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
:param sensitive_features: A (currently) required keyword argument listing the
feature used by the constraints object
:type sensitive_features: numpy.ndarray, pandas.DataFrame, pandas.Series, or list (for now)
"""
if isinstance(self.constraints, ClassificationMoment):
logger.debug("Classification problem detected")
is_classification_reduction = True
else:
logger.debug("Regression problem detected")
is_classification_reduction = False
_, y_train, sensitive_features_train = _validate_and_reformat_input(
X, y, enforce_binary_labels=is_classification_reduction, **kwargs
)
kwargs[_KW_SENSITIVE_FEATURES] = sensitive_features_train
# Prep the parity constraints and objective
logger.debug("Preparing constraints and objective")
self.constraints.load_data(X, y_train, **kwargs)
objective = self.constraints.default_objective()
objective.load_data(X, y_train, **kwargs)
# Basis information
pos_basis = self.constraints.pos_basis
neg_basis = self.constraints.neg_basis
neg_allowed = self.constraints.neg_basis_present
objective_in_the_span = self.constraints.default_objective_lambda_vec is not None
if self.grid is None:
logger.debug("Creating grid of size %i", self.grid_size)
grid = _GridGenerator(
self.grid_size,
self.grid_limit,
pos_basis,
neg_basis,
neg_allowed,
objective_in_the_span,
self.grid_offset,
).grid
else:
logger.debug("Using supplied grid")
grid = self.grid
# Fit the estimates
logger.debug("Setup complete. Starting grid search")
for i in grid.columns:
lambda_vec = grid[i]
logger.debug("Obtaining weights")
weights = self.constraints.signed_weights(lambda_vec)
if not objective_in_the_span:
weights = weights + objective.signed_weights()
if is_classification_reduction:
logger.debug("Applying relabelling for classification problem")
y_reduction = 1 * (weights > 0)
weights = weights.abs()
else:
y_reduction = y_train
y_reduction_unique = np.unique(y_reduction)
if len(y_reduction_unique) == 1:
logger.debug("y_reduction had single value. Using DummyClassifier")
current_estimator = DummyClassifier(
strategy="constant", constant=y_reduction_unique[0]
)
else:
logger.debug("Using underlying estimator")
current_estimator = copy.deepcopy(self.estimator)
oracle_call_start_time = time()
current_estimator.fit(X, y_reduction, sample_weight=weights)
oracle_call_execution_time = time() - oracle_call_start_time
logger.debug("Call to estimator complete")
def predict_fct(X):
return current_estimator.predict(X)
self._predictors.append(current_estimator)
self._lambda_vecs[i] = lambda_vec
self._objectives.append(objective.gamma(predict_fct)[0])
self._gammas[i] = self.constraints.gamma(predict_fct)
self._oracle_execution_times.append(oracle_call_execution_time)
logger.debug("Selecting best_result")
if self.selection_rule == TRADEOFF_OPTIMIZATION:
def loss_fct(i):
return (
self.objective_weight * self._objectives[i]
+ self.constraint_weight * self._gammas[i].max()
)
losses = [loss_fct(i) for i in range(len(self._objectives))]
self._best_grid_index = losses.index(min(losses))
else:
raise RuntimeError("Unsupported selection rule")
return
|
def fit(self, X, y, **kwargs):
"""Run the grid search.
This will result in multiple copies of the
estimator being made, and the :code:`fit(X)` method
of each one called.
:param X: The feature matrix
:type X: numpy.ndarray or pandas.DataFrame
:param y: The label vector
:type y: numpy.ndarray, pandas.DataFrame, pandas.Series, or list
:param sensitive_features: A (currently) required keyword argument listing the
feature used by the constraints object
:type sensitive_features: numpy.ndarray, pandas.DataFrame, pandas.Series, or list (for now)
"""
if isinstance(self.constraints, ClassificationMoment):
logger.debug("Classification problem detected")
is_classification_reduction = True
else:
logger.debug("Regression problem detected")
is_classification_reduction = False
_, y_train, sensitive_features_train = _validate_and_reformat_input(
X, y, enforce_binary_labels=is_classification_reduction, **kwargs
)
kwargs[_KW_SENSITIVE_FEATURES] = sensitive_features_train
# Prep the parity constraints and objective
logger.debug("Preparing constraints and objective")
self.constraints.load_data(X, y_train, **kwargs)
objective = self.constraints.default_objective()
objective.load_data(X, y_train, **kwargs)
# Basis information
pos_basis = self.constraints.pos_basis
neg_basis = self.constraints.neg_basis
neg_allowed = self.constraints.neg_basis_present
objective_in_the_span = self.constraints.default_objective_lambda_vec is not None
if self.grid is None:
logger.debug("Creating grid of size %i", self.grid_size)
grid = _GridGenerator(
self.grid_size,
self.grid_limit,
pos_basis,
neg_basis,
neg_allowed,
objective_in_the_span,
self.grid_offset,
).grid
else:
logger.debug("Using supplied grid")
grid = self.grid
# Fit the estimates
logger.debug("Setup complete. Starting grid search")
for i in grid.columns:
lambda_vec = grid[i]
logger.debug("Obtaining weights")
weights = self.constraints.signed_weights(lambda_vec)
if not objective_in_the_span:
weights = weights + objective.signed_weights()
if is_classification_reduction:
logger.debug("Applying relabelling for classification problem")
y_reduction = 1 * (weights > 0)
weights = weights.abs()
else:
y_reduction = y_train
current_estimator = copy.deepcopy(self.estimator)
logger.debug("Calling underlying estimator")
oracle_call_start_time = time()
current_estimator.fit(X, y_reduction, sample_weight=weights)
oracle_call_execution_time = time() - oracle_call_start_time
logger.debug("Call to underlying estimator complete")
def predict_fct(X):
return current_estimator.predict(X)
self._predictors.append(current_estimator)
self._lambda_vecs[i] = lambda_vec
self._objectives.append(objective.gamma(predict_fct)[0])
self._gammas[i] = self.constraints.gamma(predict_fct)
self._oracle_execution_times.append(oracle_call_execution_time)
logger.debug("Selecting best_result")
if self.selection_rule == TRADEOFF_OPTIMIZATION:
def loss_fct(i):
return (
self.objective_weight * self._objectives[i]
+ self.constraint_weight * self._gammas[i].max()
)
losses = [loss_fct(i) for i in range(len(self._objectives))]
self._best_grid_index = losses.index(min(losses))
else:
raise RuntimeError("Unsupported selection rule")
return
|
https://github.com/fairlearn/fairlearn/issues/395
|
from sklearn.linear_model import LogisticRegression
LogisticRegression().fit([[1],[2],[3]], [0,0,0])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "~\AppData\Local\Continuum\anaconda3\lib\site-packages\sklearn\linear_model\_logistic.py", line 1558, in fit
" class: %r" % classes_[0])
ValueError: This solver needs samples of at least 2 classes in the data, but the data contains only one class: 0
|
ValueError
|
def check_value_shape(self, value, slice_):
"""Checks if value can be set to the slice"""
if None not in self.shape and self.dtype != "O":
if not all([isinstance(sh, int) for sh in slice_]):
expected_value_shape = tuple(
[
len(range(*slice_shape.indices(self.shape[i])))
for i, slice_shape in enumerate(slice_)
if not isinstance(slice_shape, int)
]
)
if isinstance(value, list):
value = np.array(value)
if isinstance(value, np.ndarray):
value_shape = [dim for dim in value.shape if dim != 1]
expected_shape = [dim for dim in expected_value_shape if dim != 1]
if value_shape != expected_shape:
raise ValueShapeError(expected_value_shape, value.shape)
else:
value = value.reshape(expected_value_shape)
else:
expected_value_shape = (1,)
if isinstance(value, list):
value = np.array(value)
if isinstance(value, np.ndarray) and value.shape != expected_value_shape:
raise ValueShapeError(expected_value_shape, value.shape)
return value
|
def check_value_shape(self, value, slice_):
"""Checks if value can be set to the slice"""
if None not in self.shape and self.dtype != "O":
if not all([isinstance(sh, int) for sh in slice_]):
expected_value_shape = tuple(
[
len(range(*slice_shape.indices(self.shape[i])))
for i, slice_shape in enumerate(slice_)
if not isinstance(slice_shape, int)
]
)
if isinstance(value, list):
value = np.array(value)
if isinstance(value, np.ndarray):
if value.shape[0] == 1 and expected_value_shape[0] != 1:
value = np.squeeze(value, axis=0)
if value.shape[-1] == 1 and expected_value_shape[-1] != 1:
value = np.squeeze(value, axis=-1)
if value.shape != expected_value_shape:
raise ValueShapeError(expected_value_shape, value.shape)
else:
expected_value_shape = (1,)
if isinstance(value, list):
value = np.array(value)
if isinstance(value, np.ndarray) and value.shape != expected_value_shape:
raise ValueShapeError(expected_value_shape, value.shape)
return value
|
https://github.com/activeloopai/Hub/issues/316
|
Traceback (most recent call last):
File "examples/upload_mpi.py", line 52, in <module>
res_ds = out_ds.store(tag)
File "/Hub/hub/compute/transform.py", line 372, in store
n_results = self.store_shard(ds_in_shard, ds_out, start, token=token)
File "/Hub/hub/compute/transform.py", line 288, in store_shard
self.upload(
File "/Hub/hub/compute/transform.py", line 222, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Hub/hub/compute/transform.py", line 212, in upload_chunk
ds[key, i * length : i * length + batch_length] = batch
File "/Hub/hub/api/datasetview.py", line 131, in __setitem__
self.dataset._tensors[subpath][slice_list] = assign_value
File "/Hub/hub/store/dynamic_tensor.py", line 187, in __setitem__
max_shape = value[0].shape
AttributeError: 'float' object has no attribute 'shape'
|
AttributeError
|
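The revised check_value_shape above compares the assigned value's shape to the expected slice shape with singleton dimensions dropped and then reshapes, instead of squeezing only the first and last axes. A minimal sketch of that comparison, assuming numpy; the shapes are illustrative:

import numpy as np

expected_value_shape = (5, 1)
value = np.ones((1, 5))

# Compare shapes with all singleton dimensions removed; if they agree,
# reshape to the expected shape, otherwise report a shape mismatch.
if [d for d in value.shape if d != 1] == [d for d in expected_value_shape if d != 1]:
    value = value.reshape(expected_value_shape)
else:
    raise ValueError((expected_value_shape, value.shape))
print(value.shape)  # -> (5, 1)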
def __init__(
self,
url: str,
mode: str = "a",
shape=None,
schema=None,
token=None,
fs=None,
fs_map=None,
cache: int = defaults.DEFAULT_MEMORY_CACHE_SIZE,
storage_cache: int = defaults.DEFAULT_STORAGE_CACHE_SIZE,
lock_cache=True,
tokenizer=None,
):
"""| Open a new or existing dataset for read/write
Parameters
----------
url: str
The url where dataset is located/should be created
mode: str, optional (defaults to "a")
Python way to tell whether dataset is for read or write (ex. "r", "w", "a")
shape: tuple, optional
Tuple with (num_samples,) format, where num_samples is number of samples
schema: optional
Describes the data of a single sample. Hub schemas are used for that
Required for 'a' and 'w' modes
token: str or dict, optional
If url is referring to a place where authorization is required,
token is the parameter to pass the credentials, it can be filepath or dict
fs: optional
fs_map: optional
cache: int, optional
Size of the memory cache. Default is 64MB (2**26)
if 0, False or None, then cache is not used
storage_cache: int, optional
Size of the storage cache. Default is 256MB (2**28)
if 0, False or None, then storage cache is not used
lock_cache: bool, optional
Lock the cache for avoiding multiprocessing errors
"""
shape = norm_shape(shape)
if len(shape) != 1:
raise ShapeLengthException()
mode = mode or "a"
storage_cache = norm_cache(storage_cache) if cache else 0
cache = norm_cache(cache)
schema: SchemaDict = featurify(schema) if schema else None
self._url = url
self._token = token
self._mode = mode
self.tokenizer = tokenizer
self._fs, self._path = (fs, url) if fs else get_fs_and_path(self._url, token=token)
self._cache = cache
self._storage_cache = storage_cache
self.lock_cache = lock_cache
self.verison = "1.x"
needcreate = self._check_and_prepare_dir()
fs_map = fs_map or get_storage_map(
self._fs, self._path, cache, lock=lock_cache, storage_cache=storage_cache
)
self._fs_map = fs_map
self.username = None
self.dataset_name = None
if not needcreate:
self.meta = json.loads(fs_map["meta.json"].decode("utf-8"))
self._shape = tuple(self.meta["shape"])
self._schema = hub.schema.deserialize.deserialize(self.meta["schema"])
self._flat_tensors = tuple(flatten(self.schema))
self._tensors = dict(self._open_storage_tensors())
if shape != (None,) and shape != self._shape:
raise TypeError(
f"Shape in metafile [{self._shape}] and shape in arguments [{shape}] are !=, use mode='w' to overwrite dataset"
)
if schema is not None and sorted(schema.dict_.keys()) != sorted(
self._schema.dict_.keys()
):
raise TypeError(
"Schema in metafile and schema in arguments do not match, use mode='w' to overwrite dataset"
)
else:
if shape[0] is None:
raise ShapeArgumentNotFoundException()
if schema is None:
raise SchemaArgumentNotFoundException()
try:
if shape is None:
raise ShapeArgumentNotFoundException()
if schema is None:
raise SchemaArgumentNotFoundException()
self._schema = schema
self._shape = tuple(shape)
self.meta = self._store_meta()
self._flat_tensors = tuple(flatten(self.schema))
self._tensors = dict(self._generate_storage_tensors())
self.flush()
except Exception as e:
try:
self.close()
except Exception:
pass
self._fs.rm(self._path, recursive=True)
logger.error("Deleting the dataset " + traceback.format_exc() + str(e))
raise
if needcreate and (
self._path.startswith("s3://snark-hub-dev/")
or self._path.startswith("s3://snark-hub/")
):
subpath = self._path[5:]
spl = subpath.split("/")
if len(spl) < 4:
raise ValueError("Invalid Path for dataset")
self.username = spl[-2]
self.dataset_name = spl[-1]
HubControlClient().create_dataset_entry(
self.username, self.dataset_name, self.meta
)
|
def __init__(
self,
url: str,
mode: str = "a",
safe_mode: bool = False,
shape=None,
schema=None,
token=None,
fs=None,
fs_map=None,
cache: int = 2**26,
storage_cache: int = 2**28,
lock_cache=True,
tokenizer=None,
):
"""| Open a new or existing dataset for read/write
Parameters
----------
url: str
The url where dataset is located/should be created
mode: str, optional (defaults to "w")
Python way to tell whether dataset is for read or write (ex. "r", "w", "a")
safe_mode: bool, optional
if the dataset exists it cannot be rewritten in safe mode; otherwise writing is allowed the first time
shape: tuple, optional
Tuple with (num_samples,) format, where num_samples is number of samples
schema: optional
Describes the data of a single sample. Hub schemas are used for that
Required for 'a' and 'w' modes
token: str or dict, optional
If url is referring to a place where authorization is required,
token is the parameter to pass the credentials, it can be filepath or dict
fs: optional
fs_map: optional
cache: int, optional
Size of the memory cache. Default is 64MB (2**26)
if 0, False or None, then cache is not used
storage_cache: int, optional
Size of the storage cache. Default is 256MB (2**28)
if 0, False or None, then storage cache is not used
lock_cache: bool, optional
Lock the cache for avoiding multiprocessing errors
"""
shape = shape or (None,)
if isinstance(shape, int):
shape = [shape]
if shape is not None:
if len(tuple(shape)) != 1:
raise ShapeLengthException
if mode is None:
raise NoneValueException("mode")
if not cache:
storage_cache = False
self.url = url
self.token = token
self.mode = mode
self.tokenizer = tokenizer
self._fs, self._path = (fs, url) if fs else get_fs_and_path(self.url, token=token)
self.cache = cache
self._storage_cache = storage_cache
self.lock_cache = lock_cache
self.verison = "1.x"
needcreate = self._check_and_prepare_dir()
fs_map = fs_map or get_storage_map(
self._fs, self._path, cache, lock=lock_cache, storage_cache=storage_cache
)
self._fs_map = fs_map
if safe_mode and not needcreate:
mode = "r"
self.username = None
self.dataset_name = None
if not needcreate:
self.meta = json.loads(fs_map["meta.json"].decode("utf-8"))
self.shape = tuple(self.meta["shape"])
self.schema = hub.schema.deserialize.deserialize(self.meta["schema"])
self._flat_tensors = tuple(flatten(self.schema))
self._tensors = dict(self._open_storage_tensors())
else:
if shape[0] is None:
raise ShapeArgumentNotFoundException()
if schema is None:
raise SchemaArgumentNotFoundException()
try:
if shape is None:
raise ShapeArgumentNotFoundException()
if schema is None:
raise SchemaArgumentNotFoundException()
self.schema: HubSchema = featurify(schema)
self.shape = tuple(shape)
self.meta = self._store_meta()
self._flat_tensors = tuple(flatten(self.schema))
self._tensors = dict(self._generate_storage_tensors())
self.flush()
except Exception as e:
try:
self.close()
except Exception:
pass
self._fs.rm(self._path, recursive=True)
logger.error("Deleting the dataset " + traceback.format_exc() + str(e))
raise
if needcreate and (
self._path.startswith("s3://snark-hub-dev/")
or self._path.startswith("s3://snark-hub/")
):
subpath = self._path[5:]
spl = subpath.split("/")
if len(spl) < 4:
raise ValueError("Invalid Path for dataset")
self.username = spl[-2]
self.dataset_name = spl[-1]
HubControlClient().create_dataset_entry(
self.username, self.dataset_name, self.meta
)
|
https://github.com/activeloopai/Hub/issues/318
|
Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumentNotFoundException: Parameter 'shape' should be provided for Dataset creation.
|
hub.exceptions.ShapeArgumentNotFoundException
|
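The merged __init__ above runs the shape argument through norm_shape before checking its length. A rough sketch of the normalisation that check relies on, inferred from the pre-merge handling (shape or (None,), int promoted to a list); the helper below is illustrative, not hub's actual implementation:

def norm_shape(shape):
    # None stays a one-element placeholder, an int becomes a 1-tuple,
    # and anything else is coerced to a tuple so len(shape) checks are uniform.
    if shape is None:
        return (None,)
    if isinstance(shape, int):
        return (shape,)
    return tuple(shape)

assert norm_shape(None) == (None,)
assert norm_shape(10) == (10,)
assert norm_shape([10]) == (10,)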
def _check_and_prepare_dir(self):
"""
Checks if input data is ok.
Creates or overwrites dataset folder.
Returns True if the dataset needs to be created, as opposed to read.
"""
fs, path, mode = self._fs, self._path, self._mode
if path.startswith("s3://"):
with open(posixpath.expanduser("~/.activeloop/store"), "rb") as f:
stored_username = json.load(f)["_id"]
current_username = path.split("/")[-2]
if stored_username != current_username:
try:
fs.listdir(path)
except:
raise WrongUsernameException(stored_username)
exist_meta = fs.exists(posixpath.join(path, "meta.json"))
if exist_meta:
if "w" in mode:
fs.rm(path, recursive=True)
fs.makedirs(path)
return True
return False
else:
if "r" in mode:
raise HubDatasetNotFoundException(path)
exist_dir = fs.exists(path)
if not exist_dir:
fs.makedirs(path)
elif get_file_count(fs, path) > 0:
if "w" in mode:
raise NotHubDatasetToOverwriteException()
else:
raise NotHubDatasetToAppendException()
return True
|
def _check_and_prepare_dir(self):
"""
Checks if input data is ok.
Creates or overwrites dataset folder.
Returns True if the dataset needs to be created, as opposed to read.
"""
fs, path, mode = self._fs, self._path, self.mode
if path.startswith("s3://"):
with open(posixpath.expanduser("~/.activeloop/store"), "rb") as f:
stored_username = json.load(f)["_id"]
current_username = path.split("/")[-2]
if stored_username != current_username:
try:
fs.listdir(path)
except:
raise WrongUsernameException(stored_username)
exist_meta = fs.exists(posixpath.join(path, "meta.json"))
if exist_meta:
if "w" in mode:
fs.rm(path, recursive=True)
fs.makedirs(path)
return True
return False
else:
if "r" in mode:
raise HubDatasetNotFoundException(path)
exist_dir = fs.exists(path)
if not exist_dir:
fs.makedirs(path)
elif get_file_count(fs, path) > 0:
if "w" in mode:
raise NotHubDatasetToOverwriteException()
else:
raise NotHubDatasetToAppendException()
return True
|
https://github.com/activeloopai/Hub/issues/318
|
Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumentNotFoundException: Parameter 'shape' should be provided for Dataset creation.
|
hub.exceptions.ShapeArgumentNotFoundException
|
def resize_shape(self, size: int) -> None:
"""Resize the shape of the dataset by resizing each tensor first dimension"""
if size == self.shape[0]:
return
self._shape = (int(size),)
self.meta = self._store_meta()
for t in self._tensors.values():
t.resize_shape(int(size))
self._update_dataset_state()
|
def resize_shape(self, size: int) -> None:
"""Resize the shape of the dataset by resizing each tensor first dimension"""
if size == self.shape[0]:
return
self.shape = (int(size),)
self.meta = self._store_meta()
for t in self._tensors.values():
t.resize_shape(int(size))
self._update_dataset_state()
|
https://github.com/activeloopai/Hub/issues/318
|
Traceback (most recent call last):
File "examples/load.py", line 7, in <module>
ds = hub.load(path)
File "/Users/davitb/Git/Hub/hub/__init__.py", line 54, in load
return Dataset(tag)
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 141, in __init__
raise ShapeArgumentNotFoundException()
hub.exceptions.ShapeArgumentNotFoundException: Parameter 'shape' should be provided for Dataset creation.
|
hub.exceptions.ShapeArgumentNotFoundException
|
def _get_max_shape(self, shape, max_shape):
if max_shape is None:
return tuple([s or self._int32max for s in shape])
elif isinstance(max_shape, int):
assert max_shape == shape[0]
return self._get_max_shape(shape, None)
else:
max_shape = tuple(max_shape)
assert len(shape) == len(max_shape)
for s, ms in zip(shape, max_shape):
if not isinstance(ms, int):
raise HubException("MaxShape Dimension should be int")
if s is not None and s != ms:
raise HubException(
"""Dimension in shape cannot be != max_shape dimension,
if shape is not None """
)
assert s == ms or s is None and isinstance(ms, int)
return max_shape
|
def _get_max_shape(self, shape, max_shape):
if max_shape is None:
return tuple([s or self._int32max for s in shape])
elif isinstance(max_shape, int):
assert max_shape == shape[0]
return self._get_max_shape(shape, None)
else:
max_shape = tuple(max_shape)
assert len(shape) == len(max_shape)
for s, ms in zip(shape, max_shape):
assert s == ms or s is None and isinstance(ms, int)
return max_shape
|
https://github.com/activeloopai/Hub/issues/298
|
➜ feature_testing python upload_animals.py
26180
{'labels': ClassLabel(shape=(), dtype='int64', names=['pecora', 'mucca', 'cane', 'ragno', 'cavallo', 'elefante', 'gallina', 'gatto', 'scoiattolo', 'farfalla'], num_classes=10), 'image': Image(shape=(120, 120, 3), dtype='uint8', max_shape=(120, 120, 4))}
ClassLabel(shape=(), dtype='int64', names=['pecora', 'mucca', 'cane', 'ragno', 'cavallo', 'elefante', 'gallina', 'gatto', 'scoiattolo', 'farfalla'], num_classes=10)
Deleting the dataset Traceback (most recent call last):
File "/home/debo/Hub/hub/api/dataset.py", line 154, in __init__
self._tensors = dict(self._generate_storage_tensors())
File "/home/debo/Hub/hub/api/dataset.py", line 249, in _generate_storage_tensors
yield t_path, DynamicTensor(
File "/home/debo/Hub/hub/store/dynamic_tensor.py", line 67, in __init__
shapeDt = ShapeDetector(shape, max_shape, chunks, dtype)
File "/home/debo/Hub/hub/store/shape_detector.py", line 27, in __init__
self._max_shape = max_shape = self._get_max_shape(shape, max_shape)
File "/home/debo/Hub/hub/store/shape_detector.py", line 50, in _get_max_shape
assert s == ms or s is None and isinstance(ms, int)
AssertionError
Traceback (most recent call last):
File "upload_animals.py", line 23, in <module>
ds,labels = Dataset.from_directory(url,root_folder,image_shape,(ds_size,),'w+',max_shape=(120,120,4))
File "/home/debo/Hub/hub/api/dataset.py", line 680, in from_directory
ds = Dataset(
File "/home/debo/Hub/hub/api/dataset.py", line 154, in __init__
self._tensors = dict(self._generate_storage_tensors())
File "/home/debo/Hub/hub/api/dataset.py", line 249, in _generate_storage_tensors
yield t_path, DynamicTensor(
File "/home/debo/Hub/hub/store/dynamic_tensor.py", line 67, in __init__
shapeDt = ShapeDetector(shape, max_shape, chunks, dtype)
File "/home/debo/Hub/hub/store/shape_detector.py", line 27, in __init__
self._max_shape = max_shape = self._get_max_shape(shape, max_shape)
File "/home/debo/Hub/hub/store/shape_detector.py", line 50, in _get_max_shape
assert s == ms or s is None and isinstance(ms, int)
AssertionError
|
AssertionError
|
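The rewritten _get_max_shape above replaces the bare assert from the traceback with explicit HubException messages when a fixed shape dimension disagrees with max_shape. A standalone sketch of that check using the (120, 120, 3) vs (120, 120, 4) shapes from the failing upload; plain built-in exceptions stand in for HubException:

shape = (120, 120, 3)
max_shape = (120, 120, 4)

try:
    for s, ms in zip(shape, max_shape):
        if not isinstance(ms, int):
            raise TypeError("max_shape dimensions should be int")
        if s is not None and s != ms:
            # the post-merge code raises HubException here instead of asserting
            raise ValueError(f"shape dimension {s} != max_shape dimension {ms}")
except ValueError as err:
    print(err)  # shape dimension 3 != max_shape dimension 4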
def verify_cli_version():
os.environ["OUTDATED_IGNORE"] = "1"  # environment variable values must be strings
try:
version = pkg_resources.get_distribution(hub.__name__).version
is_outdated, latest_version = check_outdated(hub.__name__, version)
if is_outdated:
print(
"\033[93m"
+ "Hub is out of date. Please upgrade the package by running `pip3 install --upgrade snark`"
+ "\033[0m"
)
except Exception as e:
logger.error(str(e))
|
def verify_cli_version():
try:
version = pkg_resources.get_distribution(hub.__name__).version
is_outdated, latest_version = check_outdated(hub.__name__, version)
if is_outdated:
print(
"\033[93m"
+ "Hub is out of date. Please upgrade the package by running `pip3 install --upgrade snark`"
+ "\033[0m"
)
except Exception as e:
logger.error(str(e))
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def check_response_status(self, response):
"""
Check response status and throw corresponding exception on failure
"""
code = response.status_code
if code < 200 or code >= 300:
try:
message = response.json()["description"]
except Exception:
message = " "
logger.debug(f'Error received: status code: {code}, message: "{message}"')
if code == 400:
raise BadRequestException(response)
elif response.status_code == 401:
raise AuthenticationException()
elif response.status_code == 403:
raise AuthorizationException()
elif response.status_code == 404:
if message != " ":
raise NotFoundException(message)
else:
raise NotFoundException
elif response.status_code == 429:
raise OverLimitException(message)
elif response.status_code == 502:
raise BadGatewayException()
elif response.status_code == 504:
raise GatewayTimeoutException(message)
elif response.status_code == 423:
raise LockedException(message)
elif 500 <= response.status_code < 600:
if "Server under maintenance" in response.content.decode():
raise ServerException(
"Server under maintenance, please try again later."
)
else:
raise ServerException()
else:
msg = "An error occurred. Server response: {}".format(response.status_code)
raise HubException(message=msg)
|
def check_response_status(self, response):
"""
Check response status and throw corresponding exception on failure
"""
code = response.status_code
if code < 200 or code >= 300:
try:
message = response.json()["error"]
except Exception:
message = " "
logger.debug(f'Error received: status code: {code}, message: "{message}"')
if code == 400:
raise BadRequestException(response)
elif response.status_code == 401:
raise AuthenticationException()
elif response.status_code == 403:
raise AuthorizationException()
elif response.status_code == 404:
raise NotFoundException()
elif response.status_code == 429:
raise OverLimitException(message)
elif response.status_code == 502:
raise BadGatewayException()
elif response.status_code == 504:
raise GatewayTimeoutException(message)
elif response.status_code == 423:
raise LockedException(message)
elif 500 <= response.status_code < 600:
if "Server under maintenance" in response.content.decode():
raise ServerException(
"Server under maintenance, please try again later."
)
else:
raise ServerException()
else:
msg = "An error occurred. Server response: {}".format(response.status_code)
raise HubException(message=msg)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def init(
token: str = "",
cloud=False,
n_workers=1,
memory_limit=None,
processes=False,
threads_per_worker=1,
distributed=True,
):
"""Initializes cluster either local or on the cloud
Parameters
----------
token: str
token provided by snark
cache: float
Amount of local memory to cache locally, default 2e9 (2GB)
cloud: bool
Should be run locally or on the cloud
n_workers: int
number of concurrent workers, defaults to 1
threads_per_worker: int
Number of threads per each worker
"""
print("initialized")
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
from dask.distributed import Client
global dask
global Client
global _client
if _client is not None:
_client.close()
if cloud:
raise NotImplementedError
elif not distributed:
client = None
dask.config.set(scheduler="threading")
hub.config.DISTRIBUTED = False
else:
n_workers = n_workers if n_workers is not None else psutil.cpu_count()
memory_limit = (
memory_limit
if memory_limit is not None
else psutil.virtual_memory().available
)
local_directory = os.path.join(
os.path.expanduser("~"),
".activeloop",
"tmp",
)
if not os.path.exists(local_directory):
os.makedirs(local_directory)
client = Client(
n_workers=n_workers,
processes=processes,
memory_limit=memory_limit,
threads_per_worker=threads_per_worker,
local_directory=local_directory,
)
config.DISTRIBUTED = True
_client = client
return client
|
def init(
token: str = "",
cloud=False,
n_workers=1,
memory_limit=None,
processes=False,
threads_per_worker=1,
distributed=True,
):
"""Initializes cluster either local or on the cloud
Parameters
----------
token: str
token provided by snark
cache: float
Amount of local memory to cache locally, default 2e9 (2GB)
cloud: bool
Should be run locally or on the cloud
n_workers: int
number of concurrent workers, defaults to 1
threads_per_worker: int
Number of threads per each worker
"""
print("initialized")
global _client
if _client is not None:
_client.close()
if cloud:
raise NotImplementedError
elif not distributed:
client = None
dask.config.set(scheduler="threading")
hub.config.DISTRIBUTED = False
else:
n_workers = n_workers if n_workers is not None else psutil.cpu_count()
memory_limit = (
memory_limit
if memory_limit is not None
else psutil.virtual_memory().available
)
local_directory = os.path.join(
os.path.expanduser("~"),
".activeloop",
"tmp",
)
if not os.path.exists(local_directory):
os.makedirs(local_directory)
client = Client(
n_workers=n_workers,
processes=processes,
memory_limit=memory_limit,
threads_per_worker=threads_per_worker,
local_directory=local_directory,
)
config.DISTRIBUTED = True
_client = client
return client
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
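init above, like the generate, concat and 0.x Dataset constructor records that follow, gates its dask usage behind the same check-then-import guard. A self-contained sketch of that optional-dependency pattern; the exception class here is a stand-in for hub's ModuleNotInstalledException, and the real code additionally rebinds module-level globals:

class ModuleNotInstalledException(ImportError):
    """Stand-in for hub.exceptions.ModuleNotInstalledException."""

def require_dask():
    # Import dask lazily so the rest of the package can be used without it;
    # raise the package's dedicated exception if the import fails.
    try:
        import dask
        import dask.array  # noqa: F401
    except ImportError as exc:
        raise ModuleNotInstalledException("dask") from exc
    return dask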
def generate(generator: DatasetGenerator, input) -> Dataset:
"""Generates dataset based on DatabaseGenerator class instance and iterable input
For every element in input runs generators __call__ function.
That function should return dict of numpy arrays containing single or multiple outputs for axis 0 of generating dataset
"""
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
meta = _meta_preprocess(generator.meta())
keys = sorted(meta.keys())
tasks = [dask.delayed(_generate, nout=len(meta))(generator, i) for i in input]
if len(tasks) == 0:
return Dataset(
{
key: Tensor(
meta[key],
dask.array.from_array(np.empty(shape=(0,), dtype="uint8")),
)
for ki, key in enumerate(keys)
}
)
return Dataset(
{
key: Tensor(
meta[key],
dask.array.concatenate(
[
dask.array.from_delayed(
task[ki],
shape=_dask_shape(meta[key]["shape"]),
dtype=meta[key]["dtype"],
)
for task in tasks
]
),
delayed_objs=[task[ki] for task in tasks],
)
for ki, key in enumerate(keys)
}
)
|
def generate(generator: DatasetGenerator, input) -> Dataset:
"""Generates dataset based on DatabaseGenerator class instance and iterable input
For every element in input runs generators __call__ function.
That function should return dict of numpy arrays containing single or multiple outputs for axis 0 of generating dataset
"""
meta = _meta_preprocess(generator.meta())
keys = sorted(meta.keys())
tasks = [dask.delayed(_generate, nout=len(meta))(generator, i) for i in input]
if len(tasks) == 0:
return Dataset(
{
key: Tensor(
meta[key],
dask.array.from_array(np.empty(shape=(0,), dtype="uint8")),
)
for ki, key in enumerate(keys)
}
)
return Dataset(
{
key: Tensor(
meta[key],
dask.array.concatenate(
[
dask.array.from_delayed(
task[ki],
shape=_dask_shape(meta[key]["shape"]),
dtype=meta[key]["dtype"],
)
for task in tasks
]
),
delayed_objs=[task[ki] for task in tasks],
)
for ki, key in enumerate(keys)
}
)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def concat(datasets: Iterable[Dataset]) -> Dataset:
"""Concats multiple datasets into one along axis 0
This is equivalent to concat every tensor with the same key
"""
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
keys = [sorted(dataset._tensors.keys()) for dataset in datasets]
for key in keys:
assert key == keys[0]
keys = keys[0]
return Dataset(
{
key: Tensor(
_meta_concat([dataset._tensors[key]._meta for dataset in datasets]),
dask.array.concatenate(
[dataset._tensors[key]._array for dataset in datasets]
),
tuple(
itertools.chain(
*[
dataset._tensors[key]._delayed_objs or []
for dataset in datasets
]
)
),
)
for key in keys
}
)
|
def concat(datasets: Iterable[Dataset]) -> Dataset:
"""Concats multiple datasets into one along axis 0
This is equivalent to concat every tensor with the same key
"""
keys = [sorted(dataset._tensors.keys()) for dataset in datasets]
for key in keys:
assert key == keys[0]
keys = keys[0]
return Dataset(
{
key: Tensor(
_meta_concat([dataset._tensors[key]._meta for dataset in datasets]),
dask.array.concatenate(
[dataset._tensors[key]._array for dataset in datasets]
),
tuple(
itertools.chain(
*[
dataset._tensors[key]._delayed_objs or []
for dataset in datasets
]
)
),
)
for key in keys
}
)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def __init__(self, tensors: Dict[str, Tensor], metainfo=dict()):
"""Creates dict given dict of tensors (name -> Tensor key value pairs)"""
self._tensors = tensors
self._metainfo = metainfo
shape = None
for name, tensor in tensors.items():
if shape is None or tensor.ndim > len(shape):
shape = tensor.shape
self._len = tensor.count
self.verison = "0.x"
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
|
def __init__(self, tensors: Dict[str, Tensor], metainfo=dict()):
"""Creates dict given dict of tensors (name -> Tensor key value pairs)"""
self._tensors = tensors
self._metainfo = metainfo
shape = None
for name, tensor in tensors.items():
if shape is None or tensor.ndim > len(shape):
shape = tensor.shape
self._len = tensor.count
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def to_pytorch(self, transform=None, max_text_len=30):
"""
Transforms into pytorch dataset
Parameters
----------
transform: func
any transform that takes input a dictionary of a sample and returns transformed dictionary
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import torch
global torch
except ImportError:
pass
return TorchDataset(self, transform, max_text_len)
|
def to_pytorch(self, transform=None, max_text_len=30):
"""
Transforms into pytorch dataset
Parameters
----------
transform: func
any transform that takes input a dictionary of a sample and returns transformed dictionary
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
return TorchDataset(self, transform, max_text_len)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def to_tensorflow(self, max_text_len=30):
"""
Transforms into tensorflow dataset
Parameters
----------
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import tensorflow as tf
except ImportError:
pass
def tf_gen(step=4):
with dask.config.set(scheduler="sync"):
for index in range(0, len(self), step):
arrs = [self[index : index + step].values() for i in range(1)]
arrs = list(map(lambda x: x._array, _flatten(arrs)))
arrs = dask.delayed(list, pure=False, nout=len(list(self.keys())))(arrs)
arrs = arrs.compute()
for ind, arr in enumerate(arrs):
if arr.dtype.type is np.str_:
arr = [
([ord(x) for x in sample.tolist()[0:max_text_len]])
for sample in arr
]
arr = np.array(
[
np.pad(
sample,
(0, max_text_len - len(sample)),
"constant",
constant_values=(32),
)
for sample in arr
]
)
arrs[ind] = arr
for i in range(step):
sample = {key: r[i] for key, r in zip(self[index].keys(), arrs)}
yield sample
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
logger.log(e)
return tf.variant
output_shapes = {}
output_types = {}
for key in self.keys():
output_types[key] = tf_dtype(self._tensors[key].dtype)
output_shapes[key] = self._tensors[key].shape[1:]
# if this is a string, we change the type to int, as it's going to become ascii. shape is also set to None
if output_types[key] == tf.dtypes.as_dtype("string"):
output_types[key] = tf.dtypes.as_dtype("int8")
output_shapes[key] = None
# TODO use None for dimensions you don't know the length tf.TensorShape([None])
# FIXME Dataset Generator is not very good with multiprocessing but its good for fast tensorflow support
return tf.data.Dataset.from_generator(
tf_gen,
output_types=output_types,
# output_shapes=output_shapes,
)
|
def to_tensorflow(self, max_text_len=30):
"""
Transforms into tensorflow dataset
Parameters
----------
max_text_len: integer
the maximum length of text strings that would be stored. Strings longer than this would be snipped
"""
try:
import tensorflow as tf
except ImportError:
pass
def tf_gen(step=4):
with dask.config.set(scheduler="sync"):
for index in range(0, len(self), step):
arrs = [self[index : index + step].values() for i in range(1)]
arrs = list(map(lambda x: x._array, _flatten(arrs)))
arrs = dask.delayed(list, pure=False, nout=len(list(self.keys())))(arrs)
arrs = arrs.compute()
for ind, arr in enumerate(arrs):
if arr.dtype.type is np.str_:
arr = [
([ord(x) for x in sample.tolist()[0:max_text_len]])
for sample in arr
]
arr = np.array(
[
np.pad(
sample,
(0, max_text_len - len(sample)),
"constant",
constant_values=(32),
)
for sample in arr
]
)
arrs[ind] = arr
for i in range(step):
sample = {key: r[i] for key, r in zip(self[index].keys(), arrs)}
yield sample
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
return tf.variant
output_shapes = {}
output_types = {}
for key in self.keys():
output_types[key] = tf_dtype(self._tensors[key].dtype)
output_shapes[key] = self._tensors[key].shape[1:]
# if this is a string, we change the type to int, as it's going to become ascii. shape is also set to None
if output_types[key] == tf.dtypes.as_dtype("string"):
output_types[key] = tf.dtypes.as_dtype("int8")
output_shapes[key] = None
# TODO use None for dimensions you don't know the length tf.TensorShape([None])
# FIXME Dataset Generator is not very good with multiprocessing but its good for fast tensorflow support
return tf.data.Dataset.from_generator(
tf_gen,
output_types=output_types,
# output_shapes=output_shapes,
)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
logger.log(e)
return tf.variant
|
def tf_dtype(np_dtype):
try:
if "U" in np_dtype:
return tf.dtypes.as_dtype("string")
return tf.dtypes.as_dtype(np_dtype)
except Exception as e:
return tf.variant
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def load(tag, creds=None, session_creds=True) -> Dataset:
"""Load a dataset from repository using given url and credentials (optional)"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = f"{path}/meta.json"
if not fs.exists(path):
raise HubDatasetNotFoundException(tag)
with fs.open(path_2, "r") as f:
ds_meta = json.loads(f.read())
for name in ds_meta["tensors"]:
assert fs.exists(f"{path}/{name}"), (
f"Tensor {name} of {tag} dataset does not exist"
)
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
if ds_meta["len"] == 0:
logger.warning("The dataset is empty (has 0 samples)")
return Dataset(
{
name: Tensor(
tmeta,
dask.array.from_array(
np.empty(shape=(0,) + tuple(tmeta["shape"][1:]), dtype="uint8"),
),
)
for name, tmeta in ds_meta["tensors"].items()
},
metainfo=ds_meta.get("metainfo"),
)
len_ = ds_meta["len"]
# added reverse compatibility for previous versions
for name, tmeta in ds_meta["tensors"].items():
if "chunksize" not in tmeta:
tmeta["chunksize"] = 1
return Dataset(
{
name: Tensor(
tmeta,
_dask_concat(
[
dask.array.from_delayed(
dask.delayed(_numpy_load)(
fs,
f"{path}/{name}/{i}.npy",
codec_from_name(tmeta.get("dcompress")),
),
shape=(min(tmeta["chunksize"], len_ - i),)
+ tuple(tmeta["shape"][1:]),
dtype=tmeta["dtype"],
)
for i in range(0, len_, tmeta["chunksize"])
]
),
)
for name, tmeta in ds_meta["tensors"].items()
},
metainfo=ds_meta.get("metainfo"),
)
|
def load(tag, creds=None, session_creds=True) -> Dataset:
"""Load a dataset from repository using given url and credentials (optional)"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = f"{path}/meta.json"
if not fs.exists(path):
from hub.exceptions import DatasetNotFound
raise DatasetNotFound(tag)
with fs.open(path_2, "r") as f:
ds_meta = json.loads(f.read())
for name in ds_meta["tensors"]:
assert fs.exists(f"{path}/{name}"), (
f"Tensor {name} of {tag} dataset does not exist"
)
if ds_meta["len"] == 0:
logger.warning("The dataset is empty (has 0 samples)")
return Dataset(
{
name: Tensor(
tmeta,
dask.array.from_array(
np.empty(shape=(0,) + tuple(tmeta["shape"][1:]), dtype="uint8"),
),
)
for name, tmeta in ds_meta["tensors"].items()
},
metainfo=ds_meta.get("metainfo"),
)
len_ = ds_meta["len"]
# added reverse compatibility for previous versions
for name, tmeta in ds_meta["tensors"].items():
if "chunksize" not in tmeta:
tmeta["chunksize"] = 1
return Dataset(
{
name: Tensor(
tmeta,
_dask_concat(
[
dask.array.from_delayed(
dask.delayed(_numpy_load)(
fs,
f"{path}/{name}/{i}.npy",
codec_from_name(tmeta.get("dcompress")),
),
shape=(min(tmeta["chunksize"], len_ - i),)
+ tuple(tmeta["shape"][1:]),
dtype=tmeta["dtype"],
)
for i in range(0, len_, tmeta["chunksize"])
]
),
)
for name, tmeta in ds_meta["tensors"].items()
},
metainfo=ds_meta.get("metainfo"),
)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def from_array(array, dtag=None, dcompress=None, chunksize=None) -> Tensor:
"""Generates tensor from arraylike object
Parameters
----------
array : np.ndarray
Numpy array like object with shape, dtype, dims
dtag : str, optional
Describes type of the data stored in this array (image, mask, labels, ...)
dcompress: str, optional
Argument for compression algorithm, ignore this one, this one does not have any affect yet!
chunksize:
Information about how many items (from axis 0) should be stored in the same file if a command is given to save this tensor
Returns
-------
Tensor
newly generated tensor itself
"""
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
meta = {
"dtype": array.dtype,
"dtag": dtag,
"dcompress": dcompress,
"chunksize": chunksize,
}
if str(array.dtype) == "object":
array = dask.array.from_array(array, chunks=1)
else:
array = dask.array.from_array(array)
return Tensor(meta, array)
|
def from_array(array, dtag=None, dcompress=None, chunksize=None) -> Tensor:
"""Generates tensor from arraylike object
Parameters
----------
array : np.ndarray
Numpy array like object with shape, dtype, dims
dtag : str, optional
Describes type of the data stored in this array (image, mask, labels, ...)
dcompress: str, optional
Argument for compression algorithm, ignore this one, this one does not have any affect yet!
chunksize:
Information about how many items (from axis 0) should be stored in the same file if a command is given to save this tensor
Returns
-------
Tensor
newly generated tensor itself
"""
meta = {
"dtype": array.dtype,
"dtag": dtag,
"dcompress": dcompress,
"chunksize": chunksize,
}
if str(array.dtype) == "object":
array = dask.array.from_array(array, chunks=1)
else:
array = dask.array.from_array(array)
return Tensor(meta, array)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def __init__(self, meta: dict, daskarray, delayed_objs: tuple = None):
if "dask" not in sys.modules:
raise ModuleNotInstalledException("dask")
else:
import dask
import dask.array
global dask
if not meta.get("preprocessed"):
meta = Tensor._preprocess_meta(meta, daskarray)
self._meta = meta
self._array = daskarray
self._delayed_objs = delayed_objs
self._shape = _dask_shape_backward(daskarray.shape)
self._dtype = meta["dtype"]
self._dtag = meta.get("dtag")
self._dcompress = meta.get("dcompress")
self._dcompress_algo = meta.get("dcompress_algo")
self._dcompress_lvl = meta.get("dcompress_lvl")
self._chunksize = meta.get("chunksize")
|
def __init__(self, meta: dict, daskarray, delayed_objs: tuple = None):
if not meta.get("preprocessed"):
meta = Tensor._preprocess_meta(meta, daskarray)
self._meta = meta
self._array = daskarray
self._delayed_objs = delayed_objs
self._shape = _dask_shape_backward(daskarray.shape)
self._dtype = meta["dtype"]
self._dtag = meta.get("dtag")
self._dcompress = meta.get("dcompress")
self._dcompress_algo = meta.get("dcompress_algo")
self._dcompress_lvl = meta.get("dcompress_lvl")
self._chunksize = meta.get("chunksize")
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def __init__(self, response):
message = f"No permision to store the dataset at {response}"
super(PermissionException, self).__init__(message=message)
|
def __init__(self, response):
message = f"No permision to store the dataset at {response}"
super().__init__(message=message)
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def _flatten(list_):
"""
Helper function to flatten the list
"""
return [item for sublist in list_ for item in sublist]
|
def _flatten(l):
"""
Helper function to flatten the list
"""
return [item for sublist in l for item in sublist]
|
https://github.com/activeloopai/Hub/issues/216
|
Traceback (most recent call last):
File "hub/compute/tests/test_transform.py", line 284, in <module>
test_threaded()
File "hub/compute/tests/test_transform.py", line 88, in test_threaded
create_classification_dataset(ds_init).store(
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 246, in store
ds = self.upload(results, url=url, token=token, progressbar=progressbar)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 177, in upload
list(self.map(upload_chunk, index_batched_values))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/threading.py", line 134, in map
return _pool.map(star(f), zip(*args)) # chunksize
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 364, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 771, in get
raise self._value
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 125, in worker
result = (True, func(*args, **kwds))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/multiprocess/pool.py", line 48, in mapstar
return list(map(*args))
File "/Users/davitb/Library/Python/3.8/lib/python/site-packages/pathos/helpers/mp_helper.py", line 15, in <lambda>
func = lambda args: f(*args)
File "/Users/davitb/Git/Hub/hub/compute/transform.py", line 167, in upload_chunk
ds[key, i * length + k] = el
File "/Users/davitb/Git/Hub/hub/api/dataset.py", line 316, in __setitem__
self._tensors[subpath][slice_list] = value
File "/Users/davitb/Git/Hub/hub/store/dynamic_tensor.py", line 207, in __setitem__
self._dynamic_tensor[slice_[0]]
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 571, in __getitem__
return self.get_basic_selection(selection, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 696, in get_basic_selection
return self._get_basic_selection_nd(selection=selection, out=out,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 739, in _get_basic_selection_nd
return self._get_selection(indexer=indexer, out=out, fields=fields)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1028, in _get_selection
self._chunk_getitem(chunk_coords, chunk_selection, out, out_selection,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1649, in _chunk_getitem
self._process_chunk(out, cdata, chunk_selection, drop_axes,
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1592, in _process_chunk
chunk = self._decode_chunk(cdata)
File "/usr/local/lib/python3.8/site-packages/zarr/core.py", line 1802, in _decode_chunk
chunk = chunk.reshape(self._chunks, order=self._order)
ValueError: cannot reshape array of size 0 into shape (50,3)
|
ValueError
|
def get_argnames(func):
"""Introspecs the arguments of a callable.
Args:
func: The callable to introspect
Returns:
A list of argument names, excluding *arg and **kwargs
arguments.
"""
if six.PY2:
func_object = _get_func_if_nested(func)
spec = _get_argspec(func_object)
args = spec.args
else:
sig = inspect.signature(func)
args = [
param.name
for param in sig.parameters.values()
if param.kind
not in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
]
# NOTE(kgriffs): Depending on the version of Python, 'self' may or may not
# be present, so we normalize the results by removing 'self' as needed.
# Note that this behavior varies between 3.x versions as well as between
# 3.x and 2.7.
if args and args[0] == "self":
args = args[1:]
return args
|
def get_argnames(func):
"""Introspecs the arguments of a callable.
Args:
func: The callable to introspect
Returns:
A list of argument names, excluding *arg and **kwargs
arguments.
"""
if six.PY2:
func_object = _get_func_if_nested(func)
spec = _get_argspec(func_object)
args = spec.args
else:
sig = inspect.signature(func)
args = [
param.name
for param in sig.parameters.values()
if param.kind
not in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
]
# NOTE(kgriffs): Depending on the version of Python, 'self' may or may not
# be present, so we normalize the results by removing 'self' as needed.
# Note that this behavior varies between 3.x versions as well as between
# 3.x and 2.7.
if args[0] == "self":
args = args[1:]
return args
|
https://github.com/falconry/falcon/issues/1254
|
(falcon-bug-repro) falcon-bug-repro » python main.py
Traceback (most recent call last):
File "main.py", line 19, in <module>
MyMiddleware(),
File "/Users/joshklar/.virtualenvs/falcon-bug-repro/lib/python3.6/site-packages/falcon/api.py", line 156, in __init__
middleware, independent_middleware=independent_middleware)
File "/Users/joshklar/.virtualenvs/falcon-bug-repro/lib/python3.6/site-packages/falcon/api_helpers.py", line 87, in prepare_middleware
args = util.get_argnames(process_response)
File "/Users/joshklar/.virtualenvs/falcon-bug-repro/lib/python3.6/site-packages/falcon/util/misc.py", line 317, in get_argnames
if args[0] == 'self':
IndexError: list index out of range
|
IndexError
|
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only US-ASCII characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to US-ASCII.
"""
if PY2:
# NOTE(kgriffs): uwsgi fails with a TypeError if any header
# is not a str, so do the conversion here. It's actually
# faster to not do an isinstance check. str() will encode
# to US-ASCII.
name = str(name)
value = str(value)
# NOTE(kgriffs): normalize name by lowercasing it
self._headers[name.lower()] = value
|
def set_header(self, name, value):
"""Set a header for this response to a given value.
Warning:
Calling this method overwrites the existing value, if any.
Warning:
For setting cookies, see instead :meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only ISO-8859-1 characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to ISO-8859-1.
"""
name, value = self._encode_header(name, value)
# NOTE(kgriffs): normalize name by lowercasing it
self._headers[name.lower()] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Set-Cookie being the notable exceptions.
Warning:
For setting cookies, see :py:meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only US-ASCII characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to US-ASCII.
"""
if PY2:
# NOTE(kgriffs): uwsgi fails with a TypeError if any header
# is not a str, so do the conversion here. It's actually
# faster to not do an isinstance check. str() will encode
# to US-ASCII.
name = str(name)
value = str(value)
name = name.lower()
if name in self._headers:
value = self._headers[name] + "," + value
self._headers[name] = value
|
def append_header(self, name, value):
"""Set or append a header for this response.
Warning:
If the header already exists, the new value will be appended
to it, delimited by a comma. Most header specifications support
this format, Set-Cookie being the notable exceptions.
Warning:
For setting cookies, see :py:meth:`~.set_cookie`
Args:
name (str): Header name (case-insensitive). The restrictions
noted below for the header's value also apply here.
value (str): Value for the header. Must be of type ``str`` or
``StringType`` and contain only ISO-8859-1 characters.
Under Python 2.x, the ``unicode`` type is also accepted,
although such strings are also limited to ISO-8859-1.
"""
name, value = self._encode_header(name, value)
name = name.lower()
if name in self._headers:
value = self._headers[name] + "," + value
self._headers[name] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and *value* must be of type ``str`` or ``StringType`` and
contain only US-ASCII characters. Under Python 2.x, the
``unicode`` type is also accepted, although such strings are
also limited to US-ASCII.
Note:
Falcon can process a list of tuples slightly faster
than a dict.
Raises:
ValueError: `headers` was not a ``dict`` or ``list`` of ``tuple``.
"""
if isinstance(headers, dict):
headers = headers.items()
# NOTE(kgriffs): We can't use dict.update because we have to
# normalize the header names.
_headers = self._headers
if PY2:
for name, value in headers:
# NOTE(kgriffs): uwsgi fails with a TypeError if any header
# is not a str, so do the conversion here. It's actually
# faster to not do an isinstance check. str() will encode
# to US-ASCII.
name = str(name)
value = str(value)
_headers[name.lower()] = value
else:
for name, value in headers:
_headers[name.lower()] = value
|
def set_headers(self, headers):
"""Set several headers at once.
Warning:
Calling this method overwrites existing values, if any.
Args:
headers (dict or list): A dictionary of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name*
and *value* must be of type ``str`` or ``StringType`` and
contain only ISO-8859-1 characters. Under Python 2.x, the
``unicode`` type is also accepted, although such strings are
also limited to ISO-8859-1.
Note:
Falcon can process a list of tuples slightly faster
than a dict.
Raises:
ValueError: `headers` was not a ``dict`` or ``list`` of ``tuple``.
"""
if isinstance(headers, dict):
headers = headers.items()
# NOTE(kgriffs): We can't use dict.update because we have to
# normalize the header names.
_headers = self._headers
for name, value in headers:
name, value = self._encode_header(name, value)
_headers[name.lower()] = value
|
https://github.com/falconry/falcon/issues/413
|
Traceback (most recent call last):
File "/Users/kgriffs/Code/falcon/falcon/api.py", line 265, in __call__
start_response(resp.status, headers)
TypeError: http header value must be a string
|
TypeError
|
PyTraceBugs
Link to GitHub Repo: https://github.com/acheshkov/pytracebugs
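
For readers who want to work with these records programmatically, here is a minimal sketch of loading the dataset with the Hugging Face `datasets` library. The dataset identifier and the `train` split name are assumptions based on this page; check the linked GitHub repository for the canonical location, splits, and license.

```python
# Minimal sketch, assuming the dataset is published under this ID on the Hub.
from datasets import load_dataset

# Hypothetical identifier -- verify against the repository linked above.
dataset = load_dataset("acheshkov/pytracebugs")

# Each record pairs a buggy snippet with its fixed version, plus the traceback
# that the bug produced and a link to the originating GitHub issue.
sample = dataset["train"][0]          # "train" split name is an assumption
print(sample["traceback_type"])       # e.g. "ValueError"
print(sample["url"])                  # GitHub issue URL
print(sample["before_merge"][:200])   # buggy code (truncated for display)
print(sample["after_merge"][:200])    # fixed code (truncated for display)
```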