formatting

This commit is contained in:
  parent a681245093
  commit ff85c93551

5 changed files with 641 additions and 549 deletions
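The edits below are purely stylistic: single-quoted strings become double-quoted, trailing commas are added, and long calls are re-wrapped at a fixed line length. That is consistent with running an opinionated auto-formatter over the repository; the commit itself does not name the tool, so the invocation below is only a hedged sketch (assuming black, whose output style the diff matches):

    # Assumption: black was the formatter used; the commit does not say so.
    pip install black
    black .   # rewrites every .py file under the repository root in place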
__init__.py (25 changed lines)
@@ -150,7 +150,6 @@ class ABC3D_glyph_properties(bpy.types.PropertyGroup):
class ABC3D_text_properties(bpy.types.PropertyGroup):
def font_items_callback(self, context):
items = []
for f in Font.get_loaded_fonts_and_faces():

@@ -291,7 +290,7 @@ class ABC3D_data(bpy.types.PropertyGroup):
)
export_dir: bpy.props.StringProperty(
name="Export Directory",
-description=f"The directory in which we will export fonts.\nIf it is blank, we will export to the addon assets path.\nThis is where the fonts are installed.",
+description="The directory in which we will export fonts.\nIf it is blank, we will export to the addon assets path.\nThis is where the fonts are installed.",
subtype="DIR_PATH",
)

@@ -381,7 +380,7 @@ class ABC3D_PT_FontList(bpy.types.Panel):
box.row().label(text=f"Face Name: {face_name}")
n = 16
n_rows = int(len(available_glyphs) / n)
-box.row().label(text=f"Glyphs:")
+box.row().label(text="Glyphs:")
subbox = box.box()
for i in range(0, n_rows + 1):
text = "".join(

@@ -397,7 +396,7 @@ class ABC3D_PT_FontList(bpy.types.Panel):
row.alignment = "CENTER"
row.label(text=text)
n_rows = int(len(loaded_glyphs) / n)
-box.row().label(text=f"Loaded/Used Glyphs:")
+box.row().label(text="Loaded/Used Glyphs:")
subbox = box.box()
for i in range(0, n_rows + 1):
text = "".join(

@@ -595,7 +594,9 @@ class ABC3D_PT_FontCreation(bpy.types.Panel):
layout.row().operator(
f"{__name__}.create_font_from_objects", text="Create/Extend Font"
)
-layout.row().operator(f"{__name__}.save_font_to_file", text="Export Font To File")
+layout.row().operator(
+f"{__name__}.save_font_to_file", text="Export Font To File"
+)
box = layout.box()
box.label(text="metrics")
box.row().operator(

@@ -762,9 +763,9 @@ class ABC3D_OT_InstallFont(bpy.types.Operator):
title=f"{__name__} Warning",
icon="ERROR",
message=[
-f"Could not install font.",
+"Could not install font.",
f"We believe the font path ({font_path}) does not exist.",
-f"If this is an error, please let us know.",
+"If this is an error, please let us know.",
],
)
return {"CANCELLED"}

@@ -1200,7 +1201,7 @@ class ABC3D_OT_SaveFontToFile(bpy.types.Operator):
n = 16
n_rows = int(len(loaded_glyphs) / n)
box = layout.box()
-box.row().label(text=f"Glyphs to be exported:")
+box.row().label(text="Glyphs to be exported:")
subbox = box.box()
for i in range(0, n_rows + 1):
text = "".join(

@@ -1311,7 +1312,7 @@ class ABC3D_OT_SaveFontToFile(bpy.types.Operator):
butils.remove_faces_from_metrics(obj)
bpy.app.timers.register(lambda: remove_faces(), first_interval=2)
-self.report({"INFO"}, f"did it")
+self.report({"INFO"}, "did it")
return {"FINISHED"}

@@ -1365,9 +1366,7 @@ class ABC3D_OT_CreateFontFromObjects(bpy.types.Operator):
row = layout.row()
row.prop(self, "autodetect_names")
first_object_name = context.selected_objects[-1].name
-self.font_name, self.face_name = (
-self.do_autodetect_names(first_object_name)
-)
+self.font_name, self.face_name = self.do_autodetect_names(first_object_name)
if self.autodetect_names:
scale_y = 0.5
row = layout.row()

@@ -1663,7 +1662,7 @@ def on_depsgraph_update(scene, depsgraph):
def later():
if (
-not "lock_depsgraph_update_ntimes" in scene.abc3d_data
+"lock_depsgraph_update_ntimes" not in scene.abc3d_data
or scene.abc3d_data["lock_depsgraph_update_ntimes"] <= 0
):
butils.set_text_on_curve(
addon_updater.py (312 changed lines)
@@ -54,8 +54,8 @@ class SingletonUpdater:
needed throughout the addon. It implements all the interfaces for running
updates.
"""
-def __init__(self):
+def __init__(self):
self._engine = ForgejoEngine()
self._user = None
self._repo = None

@@ -68,7 +68,7 @@ class SingletonUpdater:
self._latest_release = None
self._use_releases = False
self._include_branches = False
-self._include_branch_list = ['master']
+self._include_branch_list = ["master"]
self._include_branch_auto_check = False
self._manual_only = False
self._version_min_update = None

@@ -110,7 +110,8 @@ class SingletonUpdater:
self._addon = __package__.lower()
self._addon_package = __package__ # Must not change.
self._updater_path = os.path.join(
-os.path.dirname(__file__), self._addon + "_updater")
+os.path.dirname(__file__), self._addon + "_updater"
+)
self._addon_root = os.path.dirname(__file__)
self._json = dict()
self._error = None

@@ -202,11 +203,13 @@ class SingletonUpdater:
@property
def check_interval(self):
-return (self._check_interval_enabled,
-self._check_interval_months,
-self._check_interval_days,
-self._check_interval_hours,
-self._check_interval_minutes)
+return (
+self._check_interval_enabled,
+self._check_interval_months,
+self._check_interval_days,
+self._check_interval_hours,
+self._check_interval_minutes,
+)
@property
def current_version(self):

@@ -221,12 +224,10 @@ class SingletonUpdater:
try:
tuple(tuple_values)
except:
-raise ValueError(
-"current_version must be a tuple of integers")
+raise ValueError("current_version must be a tuple of integers")
for i in tuple_values:
if type(i) is not int:
-raise ValueError(
-"current_version must be a tuple of integers")
+raise ValueError("current_version must be a tuple of integers")
self._current_version = tuple(tuple_values)
@property

@@ -285,15 +286,15 @@ class SingletonUpdater:
def include_branch_list(self, value):
try:
if value is None:
-self._include_branch_list = ['master']
+self._include_branch_list = ["master"]
elif not isinstance(value, list) or len(value) == 0:
raise ValueError(
-"include_branch_list should be a list of valid branches")
+"include_branch_list should be a list of valid branches"
+)
else:
self._include_branch_list = value
except:
-raise ValueError(
-"include_branch_list should be a list of valid branches")
+raise ValueError("include_branch_list should be a list of valid branches")
@property
def include_branches(self):

@@ -362,8 +363,7 @@ class SingletonUpdater:
if value is None:
self._remove_pre_update_patterns = list()
elif not isinstance(value, list):
-raise ValueError(
-"remove_pre_update_patterns needs to be in a list format")
+raise ValueError("remove_pre_update_patterns needs to be in a list format")
else:
self._remove_pre_update_patterns = value

@@ -548,8 +548,7 @@ class SingletonUpdater:
tag_names.append(tag["name"])
return tag_names
-def set_check_interval(self, enabled=False,
-months=0, days=14, hours=0, minutes=0):
+def set_check_interval(self, enabled=False, months=0, days=14, hours=0, minutes=0):
"""Set the time interval between automated checks, and if enabled.
Has enabled = False as default to not check against frequency,

@@ -582,7 +581,8 @@ class SingletonUpdater:
def __str__(self):
return "Updater, with user: {a}, repository: {b}, url: {c}".format(
-a=self._user, b=self._repo, c=self.form_repo_url())
+a=self._user, b=self._repo, c=self.form_repo_url()
+)
# -------------------------------------------------------------------------
# API-related functions

@@ -621,10 +621,7 @@ class SingletonUpdater:
temp_branches.reverse()
for branch in temp_branches:
request = self.form_branch_url(branch)
-include = {
-"name": branch.title(),
-"zipball_url": request
-}
+include = {"name": branch.title(), "zipball_url": request}
self._tags = [include] + self._tags # append to front
if self._tags is None:

@@ -643,13 +640,18 @@ class SingletonUpdater:
if not self._error:
self._tag_latest = self._tags[0]
branch = self._include_branch_list[0]
-self.print_verbose("{} branch found, no releases: {}".format(
-branch, self._tags[0]))
+self.print_verbose(
+"{} branch found, no releases: {}".format(branch, self._tags[0])
+)
-elif ((len(self._tags) - len(self._include_branch_list) == 0
-and self._include_branches)
-or (len(self._tags) == 0 and not self._include_branches)
-and self._prefiltered_tag_count > 0):
+elif (
+(
+len(self._tags) - len(self._include_branch_list) == 0
+and self._include_branches
+)
+or (len(self._tags) == 0 and not self._include_branches)
+and self._prefiltered_tag_count > 0
+):
self._tag_latest = None
self._error = "No releases available"
self._error_msg = "No versions found within compatible version range"

@@ -659,13 +661,15 @@ class SingletonUpdater:
if not self._include_branches:
self._tag_latest = self._tags[0]
self.print_verbose(
-"Most recent tag found:" + str(self._tags[0]['name']))
+"Most recent tag found:" + str(self._tags[0]["name"])
+)
else:
# Don't return branch if in list.
n = len(self._include_branch_list)
self._tag_latest = self._tags[n] # guaranteed at least len()=n+1
self.print_verbose(
-"Most recent tag found:" + str(self._tags[n]['name']))
+"Most recent tag found:" + str(self._tags[n]["name"])
+)
def get_raw(self, url):
"""All API calls to base url."""

@@ -680,13 +684,12 @@ class SingletonUpdater:
# Setup private request headers if appropriate.
if self._engine.token is not None:
if self._engine.name == "gitlab":
-request.add_header('PRIVATE-TOKEN', self._engine.token)
+request.add_header("PRIVATE-TOKEN", self._engine.token)
else:
self.print_verbose("Tokens not setup for engine yet")
# Always set user agent.
-request.add_header(
-'User-Agent', "Python/" + str(platform.python_version()))
+request.add_header("User-Agent", "Python/" + str(platform.python_version()))
# Run the request.
try:

@@ -747,8 +750,7 @@ class SingletonUpdater:
error = None
# Make/clear the staging folder, to ensure the folder is always clean.
-self.print_verbose(
-"Preparing staging folder for download:\n" + str(local))
+self.print_verbose("Preparing staging folder for download:\n" + str(local))
if os.path.isdir(local):
try:
shutil.rmtree(local)

@@ -782,17 +784,16 @@ class SingletonUpdater:
# Setup private token if appropriate.
if self._engine.token is not None:
if self._engine.name == "gitlab":
-request.add_header('PRIVATE-TOKEN', self._engine.token)
+request.add_header("PRIVATE-TOKEN", self._engine.token)
else:
-self.print_verbose(
-"Tokens not setup for selected engine yet")
+self.print_verbose("Tokens not setup for selected engine yet")
# Always set user agent
-request.add_header(
-'User-Agent', "Python/" + str(platform.python_version()))
+request.add_header("User-Agent", "Python/" + str(platform.python_version()))
-self.url_retrieve(urllib.request.urlopen(request, context=context),
-self._source_zip)
+self.url_retrieve(
+urllib.request.urlopen(request, context=context), self._source_zip
+)
# Add additional checks on file size being non-zero.
self.print_verbose("Successfully downloaded update zip")
return True

@@ -809,7 +810,8 @@ class SingletonUpdater:
self.print_verbose("Backing up current addon folder")
local = os.path.join(self._updater_path, "backup")
tempdest = os.path.join(
-self._addon_root, os.pardir, self._addon + "_updater_backup_temp")
+self._addon_root, os.pardir, self._addon + "_updater_backup_temp"
+)
self.print_verbose("Backup destination path: " + str(local))

@@ -818,7 +820,8 @@ class SingletonUpdater:
shutil.rmtree(local)
except:
self.print_verbose(
-"Failed to removed previous backup folder, continuing")
+"Failed to removed previous backup folder, continuing"
+)
self.print_trace()
# Remove the temp folder.

@@ -827,16 +830,17 @@ class SingletonUpdater:
try:
shutil.rmtree(tempdest)
except:
-self.print_verbose(
-"Failed to remove existing temp folder, continuing")
+self.print_verbose("Failed to remove existing temp folder, continuing")
self.print_trace()
# Make a full addon copy, temporarily placed outside the addon folder.
if self._backup_ignore_patterns is not None:
try:
-shutil.copytree(self._addon_root, tempdest,
-ignore=shutil.ignore_patterns(
-*self._backup_ignore_patterns))
+shutil.copytree(
+self._addon_root,
+tempdest,
+ignore=shutil.ignore_patterns(*self._backup_ignore_patterns),
+)
except:
print("Failed to create backup, still attempting update.")
self.print_trace()

@@ -853,7 +857,8 @@ class SingletonUpdater:
# Save the date for future reference.
now = datetime.now()
self._json["backup_date"] = "{m}-{d}-{yr}".format(
-m=now.strftime("%B"), d=now.day, yr=now.year)
+m=now.strftime("%B"), d=now.day, yr=now.year
+)
self.save_updater_json()
def restore_backup(self):

@@ -861,7 +866,8 @@ class SingletonUpdater:
self.print_verbose("Restoring backup, backing up current addon folder")
backuploc = os.path.join(self._updater_path, "backup")
tempdest = os.path.join(
-self._addon_root, os.pardir, self._addon + "_updater_backup_temp")
+self._addon_root, os.pardir, self._addon + "_updater_backup_temp"
+)
tempdest = os.path.abspath(tempdest)
# Move instead contents back in place, instead of copy.

@@ -910,10 +916,8 @@ class SingletonUpdater:
self._error_msg = "Failed to create extract directory"
return -1
-self.print_verbose(
-"Begin extracting source from zip:" + str(self._source_zip))
+self.print_verbose("Begin extracting source from zip:" + str(self._source_zip))
with zipfile.ZipFile(self._source_zip, "r") as zfile:
if not zfile:
self._error = "Install failed"
self._error_msg = "Resulting file is not a zip, cannot extract"

@@ -923,19 +927,20 @@ class SingletonUpdater:
# Now extract directly from the first subfolder (not root)
# this avoids adding the first subfolder to the path length,
# which can be too long if the download has the SHA in the name.
-zsep = '/' # Not using os.sep, always the / value even on windows.
+zsep = "/" # Not using os.sep, always the / value even on windows.
for name in zfile.namelist():
if zsep not in name:
continue
-top_folder = name[:name.index(zsep) + 1]
+top_folder = name[: name.index(zsep) + 1]
if name == top_folder + zsep:
continue # skip top level folder
-sub_path = name[name.index(zsep) + 1:]
+sub_path = name[name.index(zsep) + 1 :]
if name.endswith(zsep):
try:
os.mkdir(os.path.join(outdir, sub_path))
self.print_verbose(
-"Extract - mkdir: " + os.path.join(outdir, sub_path))
+"Extract - mkdir: " + os.path.join(outdir, sub_path)
+)
except OSError as exc:
if exc.errno != errno.EEXIST:
self._error = "Install failed"

@@ -947,7 +952,8 @@ class SingletonUpdater:
data = zfile.read(name)
outfile.write(data)
self.print_verbose(
-"Extract - create: " + os.path.join(outdir, sub_path))
+"Extract - create: " + os.path.join(outdir, sub_path)
+)
self.print_verbose("Extracted source")

@@ -959,8 +965,8 @@ class SingletonUpdater:
return -1
if self._subfolder_path:
-self._subfolder_path.replace('/', os.path.sep)
-self._subfolder_path.replace('\\', os.path.sep)
+self._subfolder_path.replace("/", os.path.sep)
+self._subfolder_path.replace("\\", os.path.sep)
# Either directly in root of zip/one subfolder, or use specified path.
if not os.path.isfile(os.path.join(unpath, "__init__.py")):

@@ -1018,25 +1024,31 @@ class SingletonUpdater:
# Make sure that base is not a high level shared folder, but
# is dedicated just to the addon itself.
self.print_verbose(
-"clean=True, clearing addon folder to fresh install state")
+"clean=True, clearing addon folder to fresh install state"
+)
# Remove root files and folders (except update folder).
-files = [f for f in os.listdir(base)
-if os.path.isfile(os.path.join(base, f))]
-folders = [f for f in os.listdir(base)
-if os.path.isdir(os.path.join(base, f))]
+files = [
+f for f in os.listdir(base) if os.path.isfile(os.path.join(base, f))
+]
+folders = [
+f for f in os.listdir(base) if os.path.isdir(os.path.join(base, f))
+]
for f in files:
os.remove(os.path.join(base, f))
self.print_verbose(
-"Clean removing file {}".format(os.path.join(base, f)))
+"Clean removing file {}".format(os.path.join(base, f))
+)
for f in folders:
if os.path.join(base, f) is self._updater_path:
continue
shutil.rmtree(os.path.join(base, f))
self.print_verbose(
"Clean removing folder and contents {}".format(
-os.path.join(base, f)))
+os.path.join(base, f)
+)
+)
except Exception as err:
error = "failed to create clean existing addon folder"

@@ -1047,8 +1059,9 @@ class SingletonUpdater:
# but avoid removing/altering backup and updater file.
for path, dirs, files in os.walk(base):
# Prune ie skip updater folder.
-dirs[:] = [d for d in dirs
-if os.path.join(path, d) not in [self._updater_path]]
+dirs[:] = [
+d for d in dirs if os.path.join(path, d) not in [self._updater_path]
+]
for file in files:
for pattern in self.remove_pre_update_patterns:
if fnmatch.filter([file], pattern):

@@ -1066,8 +1079,9 @@ class SingletonUpdater:
# actual file copying/replacements.
for path, dirs, files in os.walk(merger):
# Verify structure works to prune updater sub folder overwriting.
-dirs[:] = [d for d in dirs
-if os.path.join(path, d) not in [self._updater_path]]
+dirs[:] = [
+d for d in dirs if os.path.join(path, d) not in [self._updater_path]
+]
rel_path = os.path.relpath(path, merger)
dest_path = os.path.join(base, rel_path)
if not os.path.exists(dest_path):

@@ -1090,23 +1104,27 @@ class SingletonUpdater:
os.remove(dest_file)
os.rename(srcFile, dest_file)
self.print_verbose(
-"Overwrote file " + os.path.basename(dest_file))
+"Overwrote file " + os.path.basename(dest_file)
+)
else:
self.print_verbose(
"Pattern not matched to {}, not overwritten".format(
-os.path.basename(dest_file)))
+os.path.basename(dest_file)
+)
+)
else:
# File did not previously exist, simply move it over.
os.rename(srcFile, dest_file)
-self.print_verbose(
-"New file " + os.path.basename(dest_file))
+self.print_verbose("New file " + os.path.basename(dest_file))
# now remove the temp staging folder and downloaded zip
try:
shutil.rmtree(staging_path)
except:
-error = ("Error: Failed to remove existing staging directory, "
-"consider manually removing ") + staging_path
+error = (
+"Error: Failed to remove existing staging directory, "
+"consider manually removing "
+) + staging_path
self.print_verbose(error)
self.print_trace()

@@ -1168,12 +1186,12 @@ class SingletonUpdater:
return ()
segments = list()
-tmp = ''
+tmp = ""
for char in str(text):
if not char.isdigit():
if len(tmp) > 0:
segments.append(int(tmp))
-tmp = ''
+tmp = ""
else:
tmp += char
if len(tmp) > 0:

@@ -1184,7 +1202,7 @@ class SingletonUpdater:
if not self._include_branches:
return ()
else:
-return (text)
+return text
return tuple(segments)
def check_for_update_async(self, callback=None):

@@ -1193,7 +1211,8 @@ class SingletonUpdater:
self._json is not None
and "update_ready" in self._json
and self._json["version_text"] != dict()
-and self._json["update_ready"])
+and self._json["update_ready"]
+)
if is_ready:
self._update_ready = True

@@ -1210,15 +1229,13 @@ class SingletonUpdater:
self.print_verbose("Skipping async check, already started")
# already running the bg thread
elif self._update_ready is None:
-print("{} updater: Running background check for update".format(
-self.addon))
+print("{} updater: Running background check for update".format(self.addon))
self.start_async_check_update(False, callback)
def check_for_update_now(self, callback=None):
self._error = None
self._error_msg = None
-self.print_verbose(
-"Check update pressed, first getting current status")
+self.print_verbose("Check update pressed, first getting current status")
if self._async_checking:
self.print_verbose("Skipping async check, already started")
return # already running the bg thread

@@ -1243,9 +1260,7 @@ class SingletonUpdater:
# avoid running again in, just return past result if found
# but if force now check, then still do it
if self._update_ready is not None and not now:
-return (self._update_ready,
-self._update_version,
-self._update_link)
+return (self._update_ready, self._update_version, self._update_link)
if self._current_version is None:
raise ValueError("current_version not yet defined")

@@ -1259,22 +1274,18 @@ class SingletonUpdater:
self.set_updater_json() # self._json
if not now and not self.past_interval_timestamp():
-self.print_verbose(
-"Aborting check for updated, check interval not reached")
+self.print_verbose("Aborting check for updated, check interval not reached")
return (False, None, None)
# check if using tags or releases
# note that if called the first time, this will pull tags from online
if self._fake_install:
-self.print_verbose(
-"fake_install = True, setting fake version as ready")
+self.print_verbose("fake_install = True, setting fake version as ready")
self._update_ready = True
self._update_version = "(999,999,999)"
self._update_link = "http://127.0.0.1"
-return (self._update_ready,
-self._update_version,
-self._update_link)
+return (self._update_ready, self._update_version, self._update_link)
# Primary internet call, sets self._tags and self._tag_latest.
self.get_tags()

@@ -1327,7 +1338,6 @@ class SingletonUpdater:
else:
# Situation where branches not included.
if new_version > self._current_version:
self._update_ready = True
self._update_version = new_version
self._update_link = link

@@ -1386,8 +1396,7 @@ class SingletonUpdater:
if self._fake_install:
# Change to True, to trigger the reload/"update installed" handler.
self.print_verbose("fake_install=True")
-self.print_verbose(
-"Just reloading and running any handler triggers")
+self.print_verbose("Just reloading and running any handler triggers")
self._json["just_updated"] = True
self.save_updater_json()
if self._backup_current is True:

@@ -1401,15 +1410,16 @@ class SingletonUpdater:
self.print_verbose("Update stopped, new version not ready")
if callback:
callback(
-self._addon_package,
-"Update stopped, new version not ready")
+self._addon_package, "Update stopped, new version not ready"
+)
return "Update stopped, new version not ready"
elif self._update_link is None:
# this shouldn't happen if update is ready
self.print_verbose("Update stopped, update link unavailable")
if callback:
-callback(self._addon_package,
-"Update stopped, update link unavailable")
+callback(
+self._addon_package, "Update stopped, update link unavailable"
+)
return "Update stopped, update link unavailable"
if revert_tag is None:

@@ -1461,12 +1471,12 @@ class SingletonUpdater:
return True
now = datetime.now()
-last_check = datetime.strptime(
-self._json["last_check"], "%Y-%m-%d %H:%M:%S.%f")
+last_check = datetime.strptime(self._json["last_check"], "%Y-%m-%d %H:%M:%S.%f")
offset = timedelta(
days=self._check_interval_days + 30 * self._check_interval_months,
hours=self._check_interval_hours,
-minutes=self._check_interval_minutes)
+minutes=self._check_interval_minutes,
+)
delta = (now - offset) - last_check
if delta.total_seconds() > 0:

@@ -1482,8 +1492,8 @@ class SingletonUpdater:
Will also rename old file paths to addon-specific path if found.
"""
json_path = os.path.join(
-self._updater_path,
-"{}_updater_status.json".format(self._addon_package))
+self._updater_path, "{}_updater_status.json".format(self._addon_package)
+)
old_json_path = os.path.join(self._updater_path, "updater_status.json")
# Rename old file if it exists.

@@ -1517,7 +1527,7 @@ class SingletonUpdater:
"ignore": False,
"just_restored": False,
"just_updated": False,
-"version_text": dict()
+"version_text": dict(),
}
self.save_updater_json()

@@ -1537,11 +1547,13 @@ class SingletonUpdater:
jpath = self.get_json_path()
if not os.path.isdir(os.path.dirname(jpath)):
-print("State error: Directory does not exist, cannot save json: ",
-os.path.basename(jpath))
+print(
+"State error: Directory does not exist, cannot save json: ",
+os.path.basename(jpath),
+)
return
try:
-with open(jpath, 'w') as outf:
+with open(jpath, "w") as outf:
data_out = json.dumps(self._json, indent=4)
outf.write(data_out)
except:

@@ -1575,8 +1587,13 @@ class SingletonUpdater:
if self._async_checking:
return
self.print_verbose("Starting background checking thread")
-check_thread = threading.Thread(target=self.async_check_update,
-args=(now, callback,))
+check_thread = threading.Thread(
+target=self.async_check_update,
+args=(
+now,
+callback,
+),
+)
check_thread.daemon = True
self._check_thread = check_thread
check_thread.start()

@@ -1630,17 +1647,19 @@ class SingletonUpdater:
# Updater Engines
# -----------------------------------------------------------------------------
class BitbucketEngine:
"""Integration to Bitbucket API for git-formatted repositories"""
def __init__(self):
-self.api_url = 'https://api.bitbucket.org'
+self.api_url = "https://api.bitbucket.org"
self.token = None
self.name = "bitbucket"
def form_repo_url(self, updater):
return "{}/2.0/repositories/{}/{}".format(
-self.api_url, updater.user, updater.repo)
+self.api_url, updater.user, updater.repo
+)
def form_tags_url(self, updater):
return self.form_repo_url(updater) + "/refs/tags?sort=-name"

@@ -1650,31 +1669,28 @@ class BitbucketEngine:
def get_zip_url(self, name, updater):
return "https://bitbucket.org/{user}/{repo}/get/{name}.zip".format(
-user=updater.user,
-repo=updater.repo,
-name=name)
+user=updater.user, repo=updater.repo, name=name
+)
def parse_tags(self, response, updater):
if response is None:
return list()
return [
-{
-"name": tag["name"],
-"zipball_url": self.get_zip_url(tag["name"], updater)
-} for tag in response["values"]]
+{"name": tag["name"], "zipball_url": self.get_zip_url(tag["name"], updater)}
+for tag in response["values"]
+]
class GithubEngine:
"""Integration to Github API"""
def __init__(self):
-self.api_url = 'https://api.github.com'
+self.api_url = "https://api.github.com"
self.token = None
self.name = "github"
def form_repo_url(self, updater):
-return "{}/repos/{}/{}".format(
-self.api_url, updater.user, updater.repo)
+return "{}/repos/{}/{}".format(self.api_url, updater.user, updater.repo)
def form_tags_url(self, updater):
if updater.use_releases:

@@ -1698,7 +1714,7 @@ class GitlabEngine:
"""Integration to GitLab API"""
def __init__(self):
-self.api_url = 'https://gitlab.com'
+self.api_url = "https://gitlab.com"
self.token = None
self.name = "gitlab"

@@ -1710,19 +1726,19 @@ class GitlabEngine:
def form_branch_list_url(self, updater):
# does not validate branch name.
-return "{}/repository/branches".format(
-self.form_repo_url(updater))
+return "{}/repository/branches".format(self.form_repo_url(updater))
def form_branch_url(self, branch, updater):
# Could clash with tag names and if it does, it will download TAG zip
# instead of branch zip to get direct path, would need.
return "{}/repository/archive.zip?sha={}".format(
-self.form_repo_url(updater), branch)
+self.form_repo_url(updater), branch
+)
def get_zip_url(self, sha, updater):
return "{base}/repository/archive.zip?sha={sha}".format(
-base=self.form_repo_url(updater),
-sha=sha)
+base=self.form_repo_url(updater), sha=sha
+)
# def get_commit_zip(self, id, updater):
# return self.form_repo_url(updater)+"/repository/archive.zip?sha:"+id

@@ -1733,8 +1749,11 @@ class GitlabEngine:
return [
{
"name": tag["name"],
-"zipball_url": self.get_zip_url(tag["commit"]["id"], updater)
-} for tag in response]
+"zipball_url": self.get_zip_url(tag["commit"]["id"], updater),
+}
+for tag in response
+]
class ForgejoEngine:
"""Integration to Forgejo/Gitea API"""

@@ -1742,7 +1761,7 @@ class ForgejoEngine:
def __init__(self):
# the api_url may be overwritten by form_repo_url
# if updater.host is set
-self.api_url = 'https://codeberg.org'
+self.api_url = "https://codeberg.org"
self.token = None
self.name = "forgejo"

@@ -1756,19 +1775,17 @@ class ForgejoEngine:
def form_branch_list_url(self, updater):
# does not validate branch name.
-return "{}/branches".format(
-self.form_repo_url(updater))
+return "{}/branches".format(self.form_repo_url(updater))
def form_branch_url(self, branch, updater):
# Could clash with tag names and if it does, it will download TAG zip
# instead of branch zip to get direct path, would need.
-return "{}/archive/{}.zip".format(
-self.form_repo_url(updater), branch)
+return "{}/archive/{}.zip".format(self.form_repo_url(updater), branch)
def get_zip_url(self, sha, updater):
return "{base}/archive/{sha}.zip".format(
-base=self.form_repo_url(updater),
-sha=sha)
+base=self.form_repo_url(updater), sha=sha
+)
# def get_commit_zip(self, id, updater):
# return self.form_repo_url(updater)+"/repository/archive.zip?sha:"+id

@@ -1779,8 +1796,11 @@ class ForgejoEngine:
return [
{
"name": tag["name"],
-"zipball_url": self.get_zip_url(tag["commit"]["sha"], updater)
-} for tag in response]
+"zipball_url": self.get_zip_url(tag["commit"]["sha"], updater),
+}
+for tag in response
+]
# -----------------------------------------------------------------------------
# The module-shared class instance,
@ -83,15 +83,17 @@ def make_annotations(cls):
|
|||
if not hasattr(bpy.app, "version") or bpy.app.version < (2, 80):
|
||||
return cls
|
||||
if bpy.app.version < (2, 93, 0):
|
||||
bl_props = {k: v for k, v in cls.__dict__.items()
|
||||
if isinstance(v, tuple)}
|
||||
bl_props = {k: v for k, v in cls.__dict__.items() if isinstance(v, tuple)}
|
||||
else:
|
||||
bl_props = {k: v for k, v in cls.__dict__.items()
|
||||
if isinstance(v, bpy.props._PropertyDeferred)}
|
||||
bl_props = {
|
||||
k: v
|
||||
for k, v in cls.__dict__.items()
|
||||
if isinstance(v, bpy.props._PropertyDeferred)
|
||||
}
|
||||
if bl_props:
|
||||
if '__annotations__' not in cls.__dict__:
|
||||
setattr(cls, '__annotations__', {})
|
||||
annotations = cls.__dict__['__annotations__']
|
||||
if "__annotations__" not in cls.__dict__:
|
||||
setattr(cls, "__annotations__", {})
|
||||
annotations = cls.__dict__["__annotations__"]
|
||||
for k, v in bl_props.items():
|
||||
annotations[k] = v
|
||||
delattr(cls, k)
|
||||
|
@ -129,20 +131,23 @@ def get_user_preferences(context=None):
|
|||
# Simple popup to prompt use to check for update & offer install if available.
|
||||
class AddonUpdaterInstallPopup(bpy.types.Operator):
|
||||
"""Check and install update if available"""
|
||||
|
||||
bl_label = "Update {x} addon".format(x=updater.addon)
|
||||
bl_idname = updater.addon + ".updater_install_popup"
|
||||
bl_description = "Popup to check and display current updates available"
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
bl_options = {"REGISTER", "INTERNAL"}
|
||||
|
||||
# if true, run clean install - ie remove all files before adding new
|
||||
# equivalent to deleting the addon and reinstalling, except the
|
||||
# updater folder/backup folder remains
|
||||
clean_install = bpy.props.BoolProperty(
|
||||
name="Clean install",
|
||||
description=("If enabled, completely clear the addon's folder before "
|
||||
"installing new update, creating a fresh install"),
|
||||
description=(
|
||||
"If enabled, completely clear the addon's folder before "
|
||||
"installing new update, creating a fresh install"
|
||||
),
|
||||
default=False,
|
||||
options={'HIDDEN'}
|
||||
options={"HIDDEN"},
|
||||
)
|
||||
|
||||
ignore_enum = bpy.props.EnumProperty(
|
||||
|
@ -151,9 +156,9 @@ class AddonUpdaterInstallPopup(bpy.types.Operator):
|
|||
items=[
|
||||
("install", "Update Now", "Install update now"),
|
||||
("ignore", "Ignore", "Ignore this update to prevent future popups"),
|
||||
("defer", "Defer", "Defer choice till next blender session")
|
||||
("defer", "Defer", "Defer choice till next blender session"),
|
||||
],
|
||||
options={'HIDDEN'}
|
||||
options={"HIDDEN"},
|
||||
)
|
||||
|
||||
def check(self, context):
|
||||
|
@ -170,10 +175,11 @@ class AddonUpdaterInstallPopup(bpy.types.Operator):
|
|||
elif updater.update_ready:
|
||||
col = layout.column()
|
||||
col.scale_y = 0.7
|
||||
col.label(text="Update {} ready!".format(updater.update_version),
|
||||
icon="LOOP_FORWARDS")
|
||||
col.label(text="Choose 'Update Now' & press OK to install, ",
|
||||
icon="BLANK1")
|
||||
col.label(
|
||||
text="Update {} ready!".format(updater.update_version),
|
||||
icon="LOOP_FORWARDS",
|
||||
)
|
||||
col.label(text="Choose 'Update Now' & press OK to install, ", icon="BLANK1")
|
||||
col.label(text="or click outside window to defer", icon="BLANK1")
|
||||
row = col.row()
|
||||
row.prop(self, "ignore_enum", expand=True)
|
||||
|
@ -194,22 +200,21 @@ class AddonUpdaterInstallPopup(bpy.types.Operator):
|
|||
def execute(self, context):
|
||||
# In case of error importing updater.
|
||||
if updater.invalid_updater:
|
||||
return {'CANCELLED'}
|
||||
return {"CANCELLED"}
|
||||
|
||||
if updater.manual_only:
|
||||
bpy.ops.wm.url_open(url=updater.website)
|
||||
elif updater.update_ready:
|
||||
|
||||
# Action based on enum selection.
|
||||
if self.ignore_enum == 'defer':
|
||||
return {'FINISHED'}
|
||||
elif self.ignore_enum == 'ignore':
|
||||
if self.ignore_enum == "defer":
|
||||
return {"FINISHED"}
|
||||
elif self.ignore_enum == "ignore":
|
||||
updater.ignore_update()
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
res = updater.run_update(force=False,
|
||||
callback=post_update_callback,
|
||||
clean=self.clean_install)
|
||||
res = updater.run_update(
|
||||
force=False, callback=post_update_callback, clean=self.clean_install
|
||||
)
|
||||
|
||||
# Should return 0, if not something happened.
|
||||
if updater.verbose:
|
||||
|
@ -222,84 +227,86 @@ class AddonUpdaterInstallPopup(bpy.types.Operator):
|
|||
|
||||
# Re-launch this dialog.
|
||||
atr = AddonUpdaterInstallPopup.bl_idname.split(".")
|
||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
||||
getattr(getattr(bpy.ops, atr[0]), atr[1])("INVOKE_DEFAULT")
|
||||
else:
|
||||
updater.print_verbose("Doing nothing, not ready for update")
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
# User preference check-now operator
|
||||
class AddonUpdaterCheckNow(bpy.types.Operator):
|
||||
bl_label = "Check now for " + updater.addon + " update"
|
||||
bl_idname = updater.addon + ".updater_check_now"
|
||||
bl_description = "Check now for an update to the {} addon".format(
|
||||
updater.addon)
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
bl_description = "Check now for an update to the {} addon".format(updater.addon)
|
||||
bl_options = {"REGISTER", "INTERNAL"}
|
||||
|
||||
def execute(self, context):
|
||||
if updater.invalid_updater:
|
||||
return {'CANCELLED'}
|
||||
return {"CANCELLED"}
|
||||
|
||||
if updater.async_checking and updater.error is None:
|
||||
# Check already happened.
|
||||
# Used here to just avoid constant applying settings below.
|
||||
# Ignoring if error, to prevent being stuck on the error screen.
|
||||
return {'CANCELLED'}
|
||||
return {"CANCELLED"}
|
||||
|
||||
# apply the UI settings
|
||||
settings = get_user_preferences(context)
|
||||
if not settings:
|
||||
updater.print_verbose(
|
||||
"Could not get {} preferences, update check skipped".format(
|
||||
__package__))
|
||||
return {'CANCELLED'}
|
||||
"Could not get {} preferences, update check skipped".format(__package__)
|
||||
)
|
||||
return {"CANCELLED"}
|
||||
|
||||
updater.set_check_interval(
|
||||
enabled=settings.auto_check_update,
|
||||
months=settings.updater_interval_months,
|
||||
days=settings.updater_interval_days,
|
||||
hours=settings.updater_interval_hours,
|
||||
minutes=settings.updater_interval_minutes)
|
||||
minutes=settings.updater_interval_minutes,
|
||||
)
|
||||
|
||||
# Input is an optional callback function. This function should take a
|
||||
# bool input. If true: update ready, if false: no update ready.
|
||||
updater.check_for_update_now(ui_refresh)
|
||||
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class AddonUpdaterUpdateNow(bpy.types.Operator):
|
||||
bl_label = "Update " + updater.addon + " addon now"
|
||||
bl_idname = updater.addon + ".updater_update_now"
|
||||
bl_description = "Update to the latest version of the {x} addon".format(
|
||||
x=updater.addon)
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
x=updater.addon
|
||||
)
|
||||
bl_options = {"REGISTER", "INTERNAL"}
|
||||
|
||||
# If true, run clean install - ie remove all files before adding new
|
||||
# equivalent to deleting the addon and reinstalling, except the updater
|
||||
# folder/backup folder remains.
|
||||
clean_install = bpy.props.BoolProperty(
|
||||
name="Clean install",
|
||||
description=("If enabled, completely clear the addon's folder before "
|
||||
"installing new update, creating a fresh install"),
|
||||
description=(
|
||||
"If enabled, completely clear the addon's folder before "
|
||||
"installing new update, creating a fresh install"
|
||||
),
|
||||
default=False,
|
||||
options={'HIDDEN'}
|
||||
options={"HIDDEN"},
|
||||
)
|
||||
|
||||
def execute(self, context):
|
||||
|
||||
# in case of error importing updater
|
||||
if updater.invalid_updater:
|
||||
return {'CANCELLED'}
|
||||
return {"CANCELLED"}
|
||||
|
||||
if updater.manual_only:
|
||||
bpy.ops.wm.url_open(url=updater.website)
|
||||
if updater.update_ready:
|
||||
# if it fails, offer to open the website instead
|
||||
try:
|
||||
res = updater.run_update(force=False,
|
||||
callback=post_update_callback,
|
||||
clean=self.clean_install)
|
||||
res = updater.run_update(
|
||||
force=False, callback=post_update_callback, clean=self.clean_install
|
||||
)
|
||||
|
||||
# Should return 0, if not something happened.
|
||||
if updater.verbose:
|
||||
|
@ -312,30 +319,30 @@ class AddonUpdaterUpdateNow(bpy.types.Operator):
|
|||
updater._error_msg = str(expt)
|
||||
updater.print_trace()
|
||||
atr = AddonUpdaterInstallManually.bl_idname.split(".")
|
||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
||||
getattr(getattr(bpy.ops, atr[0]), atr[1])("INVOKE_DEFAULT")
|
||||
elif updater.update_ready is None:
|
||||
(update_ready, version, link) = updater.check_for_update(now=True)
|
||||
# Re-launch this dialog.
|
||||
atr = AddonUpdaterInstallPopup.bl_idname.split(".")
|
||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
||||
getattr(getattr(bpy.ops, atr[0]), atr[1])("INVOKE_DEFAULT")
|
||||
|
||||
elif not updater.update_ready:
|
||||
self.report({'INFO'}, "Nothing to update")
|
||||
return {'CANCELLED'}
|
||||
self.report({"INFO"}, "Nothing to update")
|
||||
return {"CANCELLED"}
|
||||
else:
|
||||
self.report(
|
||||
{'ERROR'}, "Encountered a problem while trying to update")
|
||||
return {'CANCELLED'}
|
||||
self.report({"ERROR"}, "Encountered a problem while trying to update")
|
||||
return {"CANCELLED"}
|
||||
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class AddonUpdaterUpdateTarget(bpy.types.Operator):
|
||||
bl_label = updater.addon + " version target"
|
||||
bl_idname = updater.addon + ".updater_update_target"
|
||||
bl_description = "Install a targeted version of the {x} addon".format(
|
||||
x=updater.addon)
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
x=updater.addon
|
||||
)
|
||||
bl_options = {"REGISTER", "INTERNAL"}
|
||||
|
||||
def target_version(self, context):
|
||||
# In case of error importing updater.
|
||||
|
@ -352,7 +359,7 @@ class AddonUpdaterUpdateTarget(bpy.types.Operator):
|
|||
target = bpy.props.EnumProperty(
|
||||
name="Target version to install",
|
||||
description="Select the version to install",
|
||||
items=target_version
|
||||
items=target_version,
|
||||
)
|
||||
|
||||
# If true, run clean install - ie remove all files before adding new
|
||||
|
@ -360,10 +367,12 @@ class AddonUpdaterUpdateTarget(bpy.types.Operator):
|
|||
# updater folder/backup folder remains.
|
||||
clean_install = bpy.props.BoolProperty(
|
||||
name="Clean install",
|
||||
description=("If enabled, completely clear the addon's folder before "
|
||||
"installing new update, creating a fresh install"),
|
||||
description=(
|
||||
"If enabled, completely clear the addon's folder before "
|
||||
"installing new update, creating a fresh install"
|
||||
),
|
||||
default=False,
|
||||
options={'HIDDEN'}
|
||||
options={"HIDDEN"},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -389,36 +398,35 @@ class AddonUpdaterUpdateTarget(bpy.types.Operator):
|
|||
def execute(self, context):
|
||||
# In case of error importing updater.
|
||||
if updater.invalid_updater:
|
||||
return {'CANCELLED'}
|
||||
return {"CANCELLED"}
|
||||
|
||||
res = updater.run_update(
|
||||
force=False,
|
||||
revert_tag=self.target,
|
||||
callback=post_update_callback,
|
||||
clean=self.clean_install)
|
||||
clean=self.clean_install,
|
||||
)
|
||||
|
||||
# Should return 0, if not something happened.
|
||||
if res == 0:
|
||||
updater.print_verbose("Updater returned successful")
|
||||
else:
|
||||
updater.print_verbose(
|
||||
"Updater returned {}, , error occurred".format(res))
|
||||
return {'CANCELLED'}
|
||||
updater.print_verbose("Updater returned {}, , error occurred".format(res))
|
||||
return {"CANCELLED"}
|
||||
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class AddonUpdaterInstallManually(bpy.types.Operator):
|
||||
"""As a fallback, direct the user to download the addon manually"""
|
||||
|
||||
bl_label = "Install update manually"
|
||||
bl_idname = updater.addon + ".updater_install_manually"
|
||||
bl_description = "Proceed to manually install update"
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
bl_options = {"REGISTER", "INTERNAL"}
|
||||
|
||||
error = bpy.props.StringProperty(
|
||||
name="Error Occurred",
|
||||
default="",
|
||||
options={'HIDDEN'}
|
||||
name="Error Occurred", default="", options={"HIDDEN"}
|
||||
)
|
||||
|
||||
def invoke(self, context, event):
|
||||
|
@ -435,10 +443,8 @@ class AddonUpdaterInstallManually(bpy.types.Operator):
|
|||
if self.error != "":
|
||||
col = layout.column()
|
||||
col.scale_y = 0.7
|
||||
col.label(text="There was an issue trying to auto-install",
|
||||
icon="ERROR")
|
||||
col.label(text="Press the download button below and install",
|
||||
icon="BLANK1")
|
||||
col.label(text="There was an issue trying to auto-install", icon="ERROR")
|
||||
col.label(text="Press the download button below and install", icon="BLANK1")
|
||||
col.label(text="the zip file like a normal addon.", icon="BLANK1")
|
||||
else:
|
||||
col = layout.column()
|
||||
|
@ -454,12 +460,10 @@ class AddonUpdaterInstallManually(bpy.types.Operator):
|
|||
|
||||
if updater.update_link is not None:
|
||||
row.operator(
|
||||
"wm.url_open",
|
||||
text="Direct download").url = updater.update_link
|
||||
"wm.url_open", text="Direct download"
|
||||
).url = updater.update_link
|
||||
else:
|
||||
row.operator(
|
||||
"wm.url_open",
|
||||
text="(failed to retrieve direct download)")
|
||||
row.operator("wm.url_open", text="(failed to retrieve direct download)")
|
||||
row.enabled = False
|
||||
|
||||
if updater.website is not None:
|
||||
|
@ -471,20 +475,19 @@ class AddonUpdaterInstallManually(bpy.types.Operator):
|
|||
row.label(text="See source website to download the update")
|
||||
|
||||
def execute(self, context):
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class AddonUpdaterUpdatedSuccessful(bpy.types.Operator):
|
||||
"""Addon in place, popup telling user it completed or what went wrong"""
|
||||
|
||||
bl_label = "Installation Report"
|
||||
bl_idname = updater.addon + ".updater_update_successful"
|
||||
bl_description = "Update installation response"
|
||||
bl_options = {'REGISTER', 'INTERNAL', 'UNDO'}
|
||||
bl_options = {"REGISTER", "INTERNAL", "UNDO"}
|
||||
|
||||
error = bpy.props.StringProperty(
|
||||
name="Error Occurred",
|
||||
default="",
|
||||
options={'HIDDEN'}
|
||||
name="Error Occurred", default="", options={"HIDDEN"}
|
||||
)
|
||||
|
||||
def invoke(self, context, event):
|
||||
|
@ -510,9 +513,8 @@ class AddonUpdaterUpdatedSuccessful(bpy.types.Operator):
|
|||
rw = col.row()
|
||||
rw.scale_y = 2
|
||||
rw.operator(
|
||||
"wm.url_open",
|
||||
text="Click for manual download.",
|
||||
icon="BLANK1").url = updater.website
|
||||
"wm.url_open", text="Click for manual download.", icon="BLANK1"
|
||||
).url = updater.website
|
||||
elif not updater.auto_reload_post_update:
|
||||
# Tell user to restart blender after an update/restore!
|
||||
if "just_restored" in saved and saved["just_restored"]:
|
||||
|
@ -521,20 +523,17 @@ class AddonUpdaterUpdatedSuccessful(bpy.types.Operator):
|
|||
alert_row = col.row()
|
||||
alert_row.alert = True
|
||||
alert_row.operator(
|
||||
"wm.quit_blender",
|
||||
text="Restart blender to reload",
|
||||
icon="BLANK1")
|
||||
"wm.quit_blender", text="Restart blender to reload", icon="BLANK1"
|
||||
)
|
||||
updater.json_reset_restore()
|
||||
else:
|
||||
col = layout.column()
|
||||
col.label(
|
||||
text="Addon successfully installed", icon="FILE_TICK")
|
||||
col.label(text="Addon successfully installed", icon="FILE_TICK")
|
||||
alert_row = col.row()
|
||||
alert_row.alert = True
|
||||
alert_row.operator(
|
||||
"wm.quit_blender",
|
||||
text="Restart blender to reload",
|
||||
icon="BLANK1")
|
||||
"wm.quit_blender", text="Restart blender to reload", icon="BLANK1"
|
||||
)
|
||||
|
||||
else:
|
||||
# reload addon, but still recommend they restart blender
|
||||
|
@ -543,28 +542,28 @@ class AddonUpdaterUpdatedSuccessful(bpy.types.Operator):
|
|||
col.scale_y = 0.7
|
||||
col.label(text="Addon restored", icon="RECOVER_LAST")
|
||||
col.label(
|
||||
text="Consider restarting blender to fully reload.",
|
||||
icon="BLANK1")
|
||||
text="Consider restarting blender to fully reload.", icon="BLANK1"
|
||||
)
|
||||
updater.json_reset_restore()
|
||||
else:
|
||||
col = layout.column()
|
||||
col.scale_y = 0.7
|
||||
col.label(text="Addon successfully installed", icon="FILE_TICK")
|
||||
col.label(
|
||||
text="Addon successfully installed", icon="FILE_TICK")
|
||||
col.label(
|
||||
text="Consider restarting blender to fully reload.",
|
||||
icon="BLANK1")
|
||||
text="Consider restarting blender to fully reload.", icon="BLANK1"
|
||||
)
|
||||
|
||||
def execute(self, context):
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class AddonUpdaterRestoreBackup(bpy.types.Operator):
|
||||
"""Restore addon from backup"""
|
||||
|
||||
bl_label = "Restore backup"
|
||||
bl_idname = updater.addon + ".updater_restore_backup"
|
||||
bl_description = "Restore addon from backup"
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
bl_options = {"REGISTER", "INTERNAL"}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
|
@ -576,17 +575,18 @@ class AddonUpdaterRestoreBackup(bpy.types.Operator):
|
|||
def execute(self, context):
|
||||
# in case of error importing updater
|
||||
if updater.invalid_updater:
|
||||
return {'CANCELLED'}
|
||||
return {"CANCELLED"}
|
||||
updater.restore_backup()
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class AddonUpdaterIgnore(bpy.types.Operator):
|
||||
"""Ignore update to prevent future popups"""
|
||||
|
||||
bl_label = "Ignore update"
|
||||
bl_idname = updater.addon + ".updater_ignore"
|
||||
bl_description = "Ignore update to prevent future popups"
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
bl_options = {"REGISTER", "INTERNAL"}
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
|
@ -600,25 +600,26 @@ class AddonUpdaterIgnore(bpy.types.Operator):
|
|||
def execute(self, context):
|
||||
# in case of error importing updater
|
||||
if updater.invalid_updater:
|
||||
return {'CANCELLED'}
|
||||
return {"CANCELLED"}
|
||||
updater.ignore_update()
|
||||
self.report({"INFO"}, "Open addon preferences for updater options")
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
class AddonUpdaterEndBackground(bpy.types.Operator):
|
||||
"""Stop checking for update in the background"""
|
||||
|
||||
bl_label = "End background check"
|
||||
bl_idname = updater.addon + ".end_background_check"
|
||||
bl_description = "Stop checking for update in the background"
|
||||
bl_options = {'REGISTER', 'INTERNAL'}
|
||||
bl_options = {"REGISTER", "INTERNAL"}
|
||||
|
||||
def execute(self, context):
|
||||
# in case of error importing updater
|
||||
if updater.invalid_updater:
|
||||
return {'CANCELLED'}
|
||||
return {"CANCELLED"}
|
||||
updater.stop_async_check_update()
|
||||
return {'FINISHED'}
|
||||
return {"FINISHED"}
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
@ -645,16 +646,16 @@ def updater_run_success_popup_handler(scene):
|
|||
|
||||
try:
|
||||
if "scene_update_post" in dir(bpy.app.handlers):
|
||||
bpy.app.handlers.scene_update_post.remove(
|
||||
updater_run_success_popup_handler)
|
||||
bpy.app.handlers.scene_update_post.remove(updater_run_success_popup_handler)
|
||||
else:
|
||||
bpy.app.handlers.depsgraph_update_post.remove(
|
||||
updater_run_success_popup_handler)
|
||||
updater_run_success_popup_handler
|
||||
)
|
||||
except:
|
||||
pass
|
||||
|
||||
atr = AddonUpdaterUpdatedSuccessful.bl_idname.split(".")
|
||||
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
|
||||
getattr(getattr(bpy.ops, atr[0]), atr[1])("INVOKE_DEFAULT")
|
||||
|
||||
|
||||
@persistent
|
||||
|
@ -669,11 +670,11 @@ def updater_run_install_popup_handler(scene):
try:
if "scene_update_post" in dir(bpy.app.handlers):
bpy.app.handlers.scene_update_post.remove(
updater_run_install_popup_handler)
bpy.app.handlers.scene_update_post.remove(updater_run_install_popup_handler)
else:
bpy.app.handlers.depsgraph_update_post.remove(
updater_run_install_popup_handler)
updater_run_install_popup_handler
)
except:
pass

@ -687,12 +688,12 @@ def updater_run_install_popup_handler(scene):
# User probably manually installed to get the up to date addon
# in here. Clear out the update flag using this function.
updater.print_verbose(
"{} updater: appears user updated, clearing flag".format(
updater.addon))
"{} updater: appears user updated, clearing flag".format(updater.addon)
)
updater.json_reset_restore()
return
atr = AddonUpdaterInstallPopup.bl_idname.split(".")
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
getattr(getattr(bpy.ops, atr[0]), atr[1])("INVOKE_DEFAULT")

def background_update_callback(update_ready):

@ -720,11 +721,9 @@ def background_update_callback(update_ready):
return

if "scene_update_post" in dir(bpy.app.handlers): # 2.7x
bpy.app.handlers.scene_update_post.append(
updater_run_install_popup_handler)
bpy.app.handlers.scene_update_post.append(updater_run_install_popup_handler)
else: # 2.8+
bpy.app.handlers.depsgraph_update_post.append(
updater_run_install_popup_handler)
bpy.app.handlers.depsgraph_update_post.append(updater_run_install_popup_handler)
ran_auto_check_install_popup = True
updater.print_verbose("Attempted popup prompt")

@ -748,17 +747,18 @@ def post_update_callback(module_name, res=None):
# This is the same code as in conditional at the end of the register
# function, ie if "auto_reload_post_update" == True, skip code.
updater.print_verbose(
"{} updater: Running post update callback".format(updater.addon))
"{} updater: Running post update callback".format(updater.addon)
)

atr = AddonUpdaterUpdatedSuccessful.bl_idname.split(".")
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
getattr(getattr(bpy.ops, atr[0]), atr[1])("INVOKE_DEFAULT")
global ran_update_success_popup
ran_update_success_popup = True
else:
# Some kind of error occurred and it was unable to install, offer
# manual download instead.
atr = AddonUpdaterUpdatedSuccessful.bl_idname.split(".")
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT', error=res)
getattr(getattr(bpy.ops, atr[0]), atr[1])("INVOKE_DEFAULT", error=res)
return
@ -791,11 +791,13 @@ def check_for_update_background():
settings = get_user_preferences(bpy.context)
if not settings:
return
updater.set_check_interval(enabled=settings.auto_check_update,
months=settings.updater_interval_months,
days=settings.updater_interval_days,
hours=settings.updater_interval_hours,
minutes=settings.updater_interval_minutes)
updater.set_check_interval(
enabled=settings.auto_check_update,
months=settings.updater_interval_months,
days=settings.updater_interval_days,
hours=settings.updater_interval_hours,
minutes=settings.updater_interval_minutes,
)

# Input is an optional callback function. This function should take a bool
# input, if true: update ready, if false: no update ready.

@ -813,22 +815,25 @@ def check_for_update_nonthreaded(self, context):
settings = get_user_preferences(bpy.context)
if not settings:
if updater.verbose:
print("Could not get {} preferences, update check skipped".format(
__package__))
print(
"Could not get {} preferences, update check skipped".format(__package__)
)
return
updater.set_check_interval(enabled=settings.auto_check_update,
months=settings.updater_interval_months,
days=settings.updater_interval_days,
hours=settings.updater_interval_hours,
minutes=settings.updater_interval_minutes)
updater.set_check_interval(
enabled=settings.auto_check_update,
months=settings.updater_interval_months,
days=settings.updater_interval_days,
hours=settings.updater_interval_hours,
minutes=settings.updater_interval_minutes,
)

(update_ready, version, link) = updater.check_for_update(now=False)
if update_ready:
atr = AddonUpdaterInstallPopup.bl_idname.split(".")
getattr(getattr(bpy.ops, atr[0]), atr[1])('INVOKE_DEFAULT')
getattr(getattr(bpy.ops, atr[0]), atr[1])("INVOKE_DEFAULT")
else:
updater.print_verbose("No update ready")
self.report({'INFO'}, "No update ready")
self.report({"INFO"}, "No update ready")

def show_reload_popup():

@ -866,11 +871,9 @@ def show_reload_popup():
return

if "scene_update_post" in dir(bpy.app.handlers): # 2.7x
bpy.app.handlers.scene_update_post.append(
updater_run_success_popup_handler)
bpy.app.handlers.scene_update_post.append(updater_run_success_popup_handler)
else: # 2.8+
bpy.app.handlers.depsgraph_update_post.append(
updater_run_success_popup_handler)
bpy.app.handlers.depsgraph_update_post.append(updater_run_success_popup_handler)
ran_update_success_popup = True
@ -896,10 +899,7 @@ def update_notice_box_ui(self, context):
col = box.column()
alert_row = col.row()
alert_row.alert = True
alert_row.operator(
"wm.quit_blender",
text="Restart blender",
icon="ERROR")
alert_row.operator("wm.quit_blender", text="Restart blender", icon="ERROR")
col.label(text="to complete update")
return

@ -924,13 +924,13 @@ def update_notice_box_ui(self, context):
colR = split.column(align=True)
colR.scale_y = 1.5
if not updater.manual_only:
colR.operator(AddonUpdaterUpdateNow.bl_idname,
text="Update", icon="LOOP_FORWARDS")
colR.operator(
AddonUpdaterUpdateNow.bl_idname, text="Update", icon="LOOP_FORWARDS"
)
col.operator("wm.url_open", text="Open website").url = updater.website
# ops = col.operator("wm.url_open",text="Direct download")
# ops.url=updater.update_link
col.operator(AddonUpdaterInstallManually.bl_idname,
text="Install manually")
col.operator(AddonUpdaterInstallManually.bl_idname, text="Install manually")
else:
# ops = col.operator("wm.url_open", text="Direct download")
# ops.url=updater.update_link

@ -959,7 +959,7 @@ def update_settings_ui(self, context, element=None):
return
settings = get_user_preferences(context)
if not settings:
box.label(text="Error getting updater preferences", icon='ERROR')
box.label(text="Error getting updater preferences", icon="ERROR")
return

# auto-update settings

@ -971,9 +971,11 @@ def update_settings_ui(self, context, element=None):
saved_state = updater.json
if "just_updated" in saved_state and saved_state["just_updated"]:
row.alert = True
row.operator("wm.quit_blender",
text="Restart blender to complete update",
icon="ERROR")
row.operator(
"wm.quit_blender",
text="Restart blender to complete update",
icon="ERROR",
)
return

split = layout_split(row, factor=0.4)

@ -1007,16 +1009,13 @@ def update_settings_ui(self, context, element=None):
split.scale_y = 2
if "ssl" in updater.error_msg.lower():
split.enabled = True
split.operator(AddonUpdaterInstallManually.bl_idname,
text=updater.error)
split.operator(AddonUpdaterInstallManually.bl_idname, text=updater.error)
else:
split.enabled = False
split.operator(AddonUpdaterCheckNow.bl_idname,
text=updater.error)
split.operator(AddonUpdaterCheckNow.bl_idname, text=updater.error)
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="", icon="FILE_REFRESH")
split.operator(AddonUpdaterCheckNow.bl_idname, text="", icon="FILE_REFRESH")

elif updater.update_ready is None and not updater.async_checking:
col.scale_y = 2
@ -1032,61 +1031,62 @@ def update_settings_ui(self, context, element=None):
split.scale_y = 2
split.operator(AddonUpdaterEndBackground.bl_idname, text="", icon="X")

elif updater.include_branches and \
len(updater.tags) == len(updater.include_branch_list) and not \
updater.manual_only:
elif (
updater.include_branches
and len(updater.tags) == len(updater.include_branch_list)
and not updater.manual_only
):
# No releases found, but still show the appropriate branch.
sub_col = col.row(align=True)
sub_col.scale_y = 1
split = sub_col.split(align=True)
split.scale_y = 2
update_now_txt = "Update directly to {}".format(
updater.include_branch_list[0])
update_now_txt = "Update directly to {}".format(updater.include_branch_list[0])
split.operator(AddonUpdaterUpdateNow.bl_idname, text=update_now_txt)
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="", icon="FILE_REFRESH")
split.operator(AddonUpdaterCheckNow.bl_idname, text="", icon="FILE_REFRESH")

elif updater.update_ready and not updater.manual_only:
sub_col = col.row(align=True)
sub_col.scale_y = 1
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterUpdateNow.bl_idname,
text="Update now to " + str(updater.update_version))
split.operator(
AddonUpdaterUpdateNow.bl_idname,
text="Update now to " + str(updater.update_version),
)
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="", icon="FILE_REFRESH")
split.operator(AddonUpdaterCheckNow.bl_idname, text="", icon="FILE_REFRESH")

elif updater.update_ready and updater.manual_only:
col.scale_y = 2
dl_now_txt = "Download " + str(updater.update_version)
col.operator("wm.url_open",
text=dl_now_txt).url = updater.website
col.operator("wm.url_open", text=dl_now_txt).url = updater.website
else: # i.e. that updater.update_ready == False.
sub_col = col.row(align=True)
sub_col.scale_y = 1
split = sub_col.split(align=True)
split.enabled = False
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="Addon is up to date")
split.operator(AddonUpdaterCheckNow.bl_idname, text="Addon is up to date")
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="", icon="FILE_REFRESH")
split.operator(AddonUpdaterCheckNow.bl_idname, text="", icon="FILE_REFRESH")

if not updater.manual_only:
col = row.column(align=True)
if updater.include_branches and len(updater.include_branch_list) > 0:
branch = updater.include_branch_list[0]
col.operator(AddonUpdaterUpdateTarget.bl_idname,
text="Install {} / old version".format(branch))
col.operator(
AddonUpdaterUpdateTarget.bl_idname,
text="Install {} / old version".format(branch),
)
else:
col.operator(AddonUpdaterUpdateTarget.bl_idname,
text="(Re)install addon version")
col.operator(
AddonUpdaterUpdateTarget.bl_idname, text="(Re)install addon version"
)
last_date = "none found"
backup_path = os.path.join(updater.stage_path, "backup")
if "backup_date" in updater.json and os.path.isdir(backup_path):

@ -1103,7 +1103,7 @@ def update_settings_ui(self, context, element=None):
if updater.error is not None and updater.error_msg is not None:
row.label(text=updater.error_msg)
elif last_check:
last_check = last_check[0: last_check.index(".")]
last_check = last_check[0 : last_check.index(".")]
row.label(text="Last update check: " + last_check)
else:
row.label(text="Last update check: Never")
@ -1127,7 +1127,7 @@ def update_settings_ui_condensed(self, context, element=None):
return
settings = get_user_preferences(context)
if not settings:
row.label(text="Error getting updater preferences", icon='ERROR')
row.label(text="Error getting updater preferences", icon="ERROR")
return

# Special case to tell user to restart blender, if set that way.

@ -1138,7 +1138,8 @@ def update_settings_ui_condensed(self, context, element=None):
row.operator(
"wm.quit_blender",
text="Restart blender to complete update",
icon="ERROR")
icon="ERROR",
)
return

col = row.column()

@ -1149,16 +1150,13 @@ def update_settings_ui_condensed(self, context, element=None):
split.scale_y = 2
if "ssl" in updater.error_msg.lower():
split.enabled = True
split.operator(AddonUpdaterInstallManually.bl_idname,
text=updater.error)
split.operator(AddonUpdaterInstallManually.bl_idname, text=updater.error)
else:
split.enabled = False
split.operator(AddonUpdaterCheckNow.bl_idname,
text=updater.error)
split.operator(AddonUpdaterCheckNow.bl_idname, text=updater.error)
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="", icon="FILE_REFRESH")
split.operator(AddonUpdaterCheckNow.bl_idname, text="", icon="FILE_REFRESH")

elif updater.update_ready is None and not updater.async_checking:
col.scale_y = 2

@ -1174,9 +1172,11 @@ def update_settings_ui_condensed(self, context, element=None):
split.scale_y = 2
split.operator(AddonUpdaterEndBackground.bl_idname, text="", icon="X")

elif updater.include_branches and \
len(updater.tags) == len(updater.include_branch_list) and not \
updater.manual_only:
elif (
updater.include_branches
and len(updater.tags) == len(updater.include_branch_list)
and not updater.manual_only
):
# No releases found, but still show the appropriate branch.
sub_col = col.row(align=True)
sub_col.scale_y = 1

@ -1186,20 +1186,20 @@ def update_settings_ui_condensed(self, context, element=None):
split.operator(AddonUpdaterUpdateNow.bl_idname, text=now_txt)
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="", icon="FILE_REFRESH")
split.operator(AddonUpdaterCheckNow.bl_idname, text="", icon="FILE_REFRESH")

elif updater.update_ready and not updater.manual_only:
sub_col = col.row(align=True)
sub_col.scale_y = 1
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterUpdateNow.bl_idname,
text="Update now to " + str(updater.update_version))
split.operator(
AddonUpdaterUpdateNow.bl_idname,
text="Update now to " + str(updater.update_version),
)
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="", icon="FILE_REFRESH")
split.operator(AddonUpdaterCheckNow.bl_idname, text="", icon="FILE_REFRESH")

elif updater.update_ready and updater.manual_only:
col.scale_y = 2

@ -1211,12 +1211,10 @@ def update_settings_ui_condensed(self, context, element=None):
split = sub_col.split(align=True)
split.enabled = False
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="Addon is up to date")
split.operator(AddonUpdaterCheckNow.bl_idname, text="Addon is up to date")
split = sub_col.split(align=True)
split.scale_y = 2
split.operator(AddonUpdaterCheckNow.bl_idname,
text="", icon="FILE_REFRESH")
split.operator(AddonUpdaterCheckNow.bl_idname, text="", icon="FILE_REFRESH")

row = element.row()
row.prop(settings, "auto_check_update")

@ -1227,7 +1225,7 @@ def update_settings_ui_condensed(self, context, element=None):
if updater.error is not None and updater.error_msg is not None:
row.label(text=updater.error_msg)
elif last_check != "" and last_check is not None:
last_check = last_check[0: last_check.index(".")]
last_check = last_check[0 : last_check.index(".")]
row.label(text="Last check: " + last_check)
else:
row.label(text="Last check: Never")
@ -1328,7 +1326,7 @@ classes = (
AddonUpdaterUpdatedSuccessful,
AddonUpdaterRestoreBackup,
AddonUpdaterIgnore,
AddonUpdaterEndBackground
AddonUpdaterEndBackground,
)

@ -1396,7 +1394,13 @@ def register(bl_info):
updater.backup_current = True # True by default

# Sample ignore patterns for when creating backup of current during update.
updater.backup_ignore_patterns = [".git", "__pycache__", "*.bat", ".gitignore", "*.exe"]
updater.backup_ignore_patterns = [
".git",
"__pycache__",
"*.bat",
".gitignore",
"*.exe",
]
# Alternate example patterns:
# updater.backup_ignore_patterns = [".git", "__pycache__", "*.bat", ".gitignore", "*.exe"]

@ -1465,7 +1469,7 @@ def register(bl_info):
# Note: updater.include_branch_list defaults to ['master'] branch if set to
# none. Example targeting another multiple branches allowed to pull from:
# updater.include_branch_list = ['master', 'dev']
updater.include_branch_list = ['main', 'dev'] # None is the equivalent = ['master']
updater.include_branch_list = ["main", "dev"] # None is the equivalent = ['master']

# Only allow manual install, thus prompting the user to open
# the addon's web page to download, specifically: updater.website
234
bimport.py
@ -1,10 +1,12 @@
import bpy
from bpy.props import (StringProperty,
BoolProperty,
EnumProperty,
IntProperty,
FloatProperty,
CollectionProperty)
from bpy.props import (
StringProperty,
BoolProperty,
EnumProperty,
IntProperty,
FloatProperty,
CollectionProperty,
)
from bpy.types import Operator
from bpy_extras.io_utils import ImportHelper, ExportHelper
from io_scene_gltf2 import ConvertGLTF2_Base

@ -24,41 +26,50 @@ else:

# taken from blender_git/blender/scripts/addons/io_scene_gltf2/__init__.py

def get_font_faces_in_file(filepath):
from io_scene_gltf2.io.imp.gltf2_io_gltf import glTFImporter, ImportError

try:
import_settings = { 'import_user_extensions': [] }
import_settings = {"import_user_extensions": []}
gltf_importer = glTFImporter(filepath, import_settings)
gltf_importer.read()
gltf_importer.checks()

out = []
for node in gltf_importer.data.nodes:
if type(node.extras) != type(None) \
and "glyph" in node.extras \
and not ("type" in node.extras and node.extras["type"] == "metrics") \
and not (f"{utils.prefix()}_type" in node.extras and node.extras[f"{utils.prefix()}_type"] == "metrics"):
if (
type(node.extras) != type(None)
and "glyph" in node.extras
and not ("type" in node.extras and node.extras["type"] == "metrics")
and not (
f"{utils.prefix()}_type" in node.extras
and node.extras[f"{utils.prefix()}_type"] == "metrics"
)
):
out.append(node.extras)
return out

except ImportError as e:
return None

# taken from blender_git/blender/scripts/addons/io_scene_gltf2/__init__.py

class GetFontFacesInFile(Operator, ImportHelper):
"""Load a glTF 2.0 font and check which faces are in there"""

bl_idname = f"abc3d.check_font_gltf"
bl_label = 'Check glTF 2.0 Font'
bl_options = {'REGISTER', 'UNDO'}
bl_label = "Check glTF 2.0 Font"
bl_options = {"REGISTER", "UNDO"}

files: CollectionProperty(
name="File Path",
type=bpy.types.OperatorFileListElement,
)

# bpy.ops.abc3d.check_font_gltf(filepath="/home/jrkb/.config/blender/4.1/datafiles/abc3d/fonts/JRKB_LOL.glb")
# bpy.ops.abc3d.check_font_gltf(filepath="/home/jrkb/.config/blender/4.1/datafiles/abc3d/fonts/JRKB_LOL.glb")
found_fonts = []

def execute(self, context):
@ -70,96 +81,106 @@ class GetFontFacesInFile(Operator, ImportHelper):

if self.files:
# Multiple file check
ret = {'CANCELLED'}
ret = {"CANCELLED"}
dirname = os.path.dirname(self.filepath)
for file in self.files:
path = os.path.join(dirname, file.name)
if self.unit_check(path) == {'FINISHED'}:
ret = {'FINISHED'}
if self.unit_check(path) == {"FINISHED"}:
ret = {"FINISHED"}
return ret
else:
# Single file check
return self.unit_check(self.filepath)

def unit_check(self, filename):
self.found_fonts.append(["LOL","WHATEVER"])
return {'FINISHED'}
self.found_fonts.append(["LOL", "WHATEVER"])
return {"FINISHED"}

class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
"""Load a glTF 2.0 font"""
bl_idname = f"abc3d.import_font_gltf"
bl_label = 'Import glTF 2.0 Font'
bl_options = {'REGISTER', 'UNDO'}

filter_glob: StringProperty(default="*.glb;*.gltf", options={'HIDDEN'})
bl_idname = f"abc3d.import_font_gltf"
bl_label = "Import glTF 2.0 Font"
bl_options = {"REGISTER", "UNDO"}

filter_glob: StringProperty(default="*.glb;*.gltf", options={"HIDDEN"})

files: CollectionProperty(
name="File Path",
type=bpy.types.OperatorFileListElement,
)

loglevel: IntProperty(
name='Log Level',
description="Log Level")
loglevel: IntProperty(name="Log Level", description="Log Level")

import_pack_images: BoolProperty(
name='Pack Images',
description='Pack all images into .blend file',
default=True
name="Pack Images", description="Pack all images into .blend file", default=True
)

merge_vertices: BoolProperty(
name='Merge Vertices',
name="Merge Vertices",
description=(
'The glTF format requires discontinuous normals, UVs, and '
'other vertex attributes to be stored as separate vertices, '
'as required for rendering on typical graphics hardware. '
'This option attempts to combine co-located vertices where possible. '
'Currently cannot combine verts with different normals'
"The glTF format requires discontinuous normals, UVs, and "
"other vertex attributes to be stored as separate vertices, "
"as required for rendering on typical graphics hardware. "
"This option attempts to combine co-located vertices where possible. "
"Currently cannot combine verts with different normals"
),
default=False,
)

import_shading: EnumProperty(
name="Shading",
items=(("NORMALS", "Use Normal Data", ""),
("FLAT", "Flat Shading", ""),
("SMOOTH", "Smooth Shading", "")),
items=(
("NORMALS", "Use Normal Data", ""),
("FLAT", "Flat Shading", ""),
("SMOOTH", "Smooth Shading", ""),
),
description="How normals are computed during import",
default="NORMALS")
default="NORMALS",
)

bone_heuristic: EnumProperty(
name="Bone Dir",
items=(
("BLENDER", "Blender (best for import/export round trip)",
(
"BLENDER",
"Blender (best for import/export round trip)",
"Good for re-importing glTFs exported from Blender, "
"and re-exporting glTFs to glTFs after Blender editing. "
"Bone tips are placed on their local +Y axis (in glTF space)"),
("TEMPERANCE", "Temperance (average)",
"Bone tips are placed on their local +Y axis (in glTF space)",
),
(
"TEMPERANCE",
"Temperance (average)",
"Decent all-around strategy. "
"A bone with one child has its tip placed on the local axis "
"closest to its child"),
("FORTUNE", "Fortune (may look better, less accurate)",
"closest to its child",
),
(
"FORTUNE",
"Fortune (may look better, less accurate)",
"Might look better than Temperance, but also might have errors. "
"A bone with one child has its tip placed at its child's root. "
"Non-uniform scalings may get messed up though, so beware"),
"Non-uniform scalings may get messed up though, so beware",
),
),
description="Heuristic for placing bones. Tries to make bones pretty",
default="BLENDER",
)

guess_original_bind_pose: BoolProperty(
name='Guess Original Bind Pose',
name="Guess Original Bind Pose",
description=(
'Try to guess the original bind pose for skinned meshes from '
'the inverse bind matrices. '
'When off, use default/rest pose as bind pose'
"Try to guess the original bind pose for skinned meshes from "
"the inverse bind matrices. "
"When off, use default/rest pose as bind pose"
),
default=True,
)

import_webp_texture: BoolProperty(
name='Import WebP textures',
name="Import WebP textures",
description=(
"If a texture exists in WebP format, "
"loads the WebP texture instead of the fallback PNG/JPEG one"
@ -168,7 +189,7 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
)

glyphs: StringProperty(
name='Import only these glyphs',
name="Import only these glyphs",
description=(
"Loading glyphs is expensive, if the meshes are huge"
"So we can filter all glyphs out that we do not want"

@ -197,25 +218,32 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
layout.use_property_split = True
layout.use_property_decorate = False # No animation.

layout.prop(self, 'import_pack_images')
layout.prop(self, 'merge_vertices')
layout.prop(self, 'import_shading')
layout.prop(self, 'guess_original_bind_pose')
layout.prop(self, 'bone_heuristic')
layout.prop(self, 'export_import_convert_lighting_mode')
layout.prop(self, 'import_webp_texture')
layout.prop(self, "import_pack_images")
layout.prop(self, "merge_vertices")
layout.prop(self, "import_shading")
layout.prop(self, "guess_original_bind_pose")
layout.prop(self, "bone_heuristic")
layout.prop(self, "export_import_convert_lighting_mode")
layout.prop(self, "import_webp_texture")

def invoke(self, context, event):
import sys

preferences = bpy.context.preferences
for addon_name in preferences.addons.keys():
try:
if hasattr(sys.modules[addon_name], 'glTF2ImportUserExtension') or hasattr(sys.modules[addon_name], 'glTF2ImportUserExtensions'):
importer_extension_panel_unregister_functors.append(sys.modules[addon_name].register_panel())
if hasattr(
sys.modules[addon_name], "glTF2ImportUserExtension"
) or hasattr(sys.modules[addon_name], "glTF2ImportUserExtensions"):
importer_extension_panel_unregister_functors.append(
sys.modules[addon_name].register_panel()
)
except Exception:
pass

self.has_active_importer_extensions = len(importer_extension_panel_unregister_functors) > 0
self.has_active_importer_extensions = (
len(importer_extension_panel_unregister_functors) > 0
)
return ImportHelper.invoke(self, context, event)

def execute(self, context):

@ -230,25 +258,26 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
user_extensions = []

import sys

preferences = bpy.context.preferences
for addon_name in preferences.addons.keys():
try:
module = sys.modules[addon_name]
except Exception:
continue
if hasattr(module, 'glTF2ImportUserExtension'):
if hasattr(module, "glTF2ImportUserExtension"):
extension_ctor = module.glTF2ImportUserExtension
user_extensions.append(extension_ctor())
import_settings['import_user_extensions'] = user_extensions
import_settings["import_user_extensions"] = user_extensions

if self.files:
# Multiple file import
ret = {'CANCELLED'}
ret = {"CANCELLED"}
dirname = os.path.dirname(self.filepath)
for file in self.files:
path = os.path.join(dirname, file.name)
if self.unit_import(path, import_settings) == {'FINISHED'}:
ret = {'FINISHED'}
if self.unit_import(path, import_settings) == {"FINISHED"}:
ret = {"FINISHED"}
return ret
else:
# Single file import
@ -308,18 +337,31 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
# indeed representing a glyph we want
for node in gltf.data.nodes:
# :-O woah
if type(node.extras) != type(None) \
and "glyph" in node.extras \
and (node.extras["glyph"] in self.glyphs \
or len(self.glyphs) == 0) \
and (self.font_name == "" or \
( "font_name" in node.extras \
and (node.extras["font_name"] in self.font_name \
or len(self.glyphs) == 0))) \
and (self.face_name == "" or \
( "face_name" in node.extras \
and (node.extras["face_name"] in self.face_name \
or len(self.glyphs) == 0))):
if (
type(node.extras) != type(None)
and "glyph" in node.extras
and (node.extras["glyph"] in self.glyphs or len(self.glyphs) == 0)
and (
self.font_name == ""
or (
"font_name" in node.extras
and (
node.extras["font_name"] in self.font_name
or len(self.glyphs) == 0
)
)
)
and (
self.face_name == ""
or (
"face_name" in node.extras
and (
node.extras["face_name"] in self.face_name
or len(self.glyphs) == 0
)
)
)
):
# if there is a match, add the node incl children ..
add_node(node)
# .. and their parents recursively

@ -355,7 +397,7 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
# and some have different indices
for node in nodes:
if type(node.children) != type(None):
children = [] # brand new children
children = []  # brand new children
for i, c in enumerate(node.children):
# check if children are lost
if c in node_indices:

@ -399,23 +441,26 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
vnode = gltf.vnodes[vi]
if vnode.type == VNode.Object:
if vnode.parent is not None:
if not hasattr(gltf.vnodes[vnode.parent],
"blender_object"):
create_blender_object(gltf,
vnode.parent,
nodes)
if not hasattr(vnode,
"blender_object"):
if not hasattr(gltf.vnodes[vnode.parent], "blender_object"):
create_blender_object(gltf, vnode.parent, nodes)
if not hasattr(vnode, "blender_object"):
obj = BlenderNode.create_object(gltf, vi)
obj["font_import"] = True
n_vars = vars(nodes[vi])
if "extras" in n_vars:
set_extras(obj, n_vars["extras"])
if "glyph" in n_vars["extras"] and \
not ("type" in n_vars["extras"] and \
n_vars["extras"]["type"] == "metrics") and \
not (f"{utils.prefix()}_type" in n_vars["extras"] and \
n_vars["extras"][f"{utils.prefix()}_type"] == "metrics"):
if (
"glyph" in n_vars["extras"]
and not (
"type" in n_vars["extras"]
and n_vars["extras"]["type"] == "metrics"
)
and not (
f"{utils.prefix()}_type" in n_vars["extras"]
and n_vars["extras"][f"{utils.prefix()}_type"]
== "metrics"
)
):
obj["type"] = "glyph"

for vi, vnode in gltf.vnodes.items():

@ -432,14 +477,15 @@ class ImportGLTF2(Operator, ConvertGLTF2_Base, ImportHelper):
if hasattr(gltf.log.logger, "removeHandler"):
gltf.log.logger.removeHandler(gltf.log_handler)

return {'FINISHED'}
return {"FINISHED"}

except ImportError as e:
self.report({'ERROR'}, e.args[0])
return {'CANCELLED'}
self.report({"ERROR"}, e.args[0])
return {"CANCELLED"}

def set_debug_log(self):
import logging

if bpy.app.debug_value == 0:
self.loglevel = logging.CRITICAL
elif bpy.app.debug_value == 1:
171
common/Font.py
@ -7,64 +7,64 @@ from pathlib import Path
# note: overwritten/extended by the content of "glypNamesToUnicode.txt"
# when addon is registered in __init__.py
name_to_glyph_d = {
"zero": "0",
"one": "1",
"two": "2",
"three": "3",
"four": "4",
"five": "5",
"six": "6",
"seven": "7",
"eight": "8",
"nine": "9",
"ampersand": "&",
"backslash": "\\",
"colon": ":",
"comma": ",",
"equal": "=",
"exclam": "!",
"hyphen": "-",
"minus": "−",
"parenleft": "(",
"parenright": "(",
"period": ".",
"plus": "+",
"question": "?",
"quotedblleft": "“",
"quotedblright": "”",
"semicolon": ";",
"slash": "/",
"space": " ",
}
"zero": "0",
"one": "1",
"two": "2",
"three": "3",
"four": "4",
"five": "5",
"six": "6",
"seven": "7",
"eight": "8",
"nine": "9",
"ampersand": "&",
"backslash": "\\",
"colon": ":",
"comma": ",",
"equal": "=",
"exclam": "!",
"hyphen": "-",
"minus": "−",
"parenleft": "(",
"parenright": "(",
"period": ".",
"plus": "+",
"question": "?",
"quotedblleft": "“",
"quotedblright": "”",
"semicolon": ";",
"slash": "/",
"space": " ",
}

space_d = {}

known_misspellings = {
# simple misspelling
"excent" : "accent",
"overdot" : "dotaccent",
"diaresis": "dieresis",
"diaeresis": "dieresis",
# character does not exist.. maybe something else
"Odoubleacute": "Ohungarumlaut",
"Udoubleacute": "Uhungarumlaut",
"Wcaron": "Wcircumflex",
"Neng": "Nlongrightleg",
"Lgrave": "Lacute",
# currency stuff
"doller": "dollar",
"euro": "Euro",
"yuan": "yen", # https://en.wikipedia.org/wiki/Yen_and_yuan_sign
"pound": "sterling",
# whoopsie
"__": "_",
}
# simple misspelling
"excent": "accent",
"overdot": "dotaccent",
"diaresis": "dieresis",
"diaeresis": "dieresis",
# character does not exist.. maybe something else
"Odoubleacute": "Ohungarumlaut",
"Udoubleacute": "Uhungarumlaut",
"Wcaron": "Wcircumflex",
"Neng": "Nlongrightleg",
"Lgrave": "Lacute",
# currency stuff
"doller": "dollar",
"euro": "Euro",
"yuan": "yen", # https://en.wikipedia.org/wiki/Yen_and_yuan_sign
"pound": "sterling",
# whoopsie
"__": "_",
}

def fix_glyph_name_misspellings(name):
for misspelling in known_misspellings:
if misspelling in name:
return name.replace(misspelling,
known_misspellings[misspelling])
return name.replace(misspelling, known_misspellings[misspelling])
return name
@ -88,33 +88,37 @@ def generate_from_file_d(filepath):
d = {}
with open(filepath) as f:
for line in f:
if line[0] == '#':
if line[0] == "#":
continue
split = line.split(' ')
split = line.split(" ")
if len(split) == 2:
(name, hexstr) = line.split(' ')
(name, hexstr) = line.split(" ")
val = chr(int(hexstr, base=16))
d[name] = val
if len(split) == 3:
# we might have a parameter, like for the spaces
(name, hexstr, parameter) = line.split(' ')
(name, hexstr, parameter) = line.split(" ")
parameter_value = float(parameter)
val = chr(int(hexstr, base=16))
d[name] = [val, parameter_value]
return d

def generate_name_to_glyph_d():
return generate_from_file_d(f"{Path(__file__).parent}/glyphNamesToUnicode.txt")

def generate_space_d():
return generate_from_file_d(f"{Path(__file__).parent}/spacesUnicode.txt")

def init():
global name_to_glyph_d
global space_d
name_to_glyph_d = generate_name_to_glyph_d()
space_d = generate_space_d()

class FontFace:
"""FontFace is a class holding glyphs

@ -127,8 +131,8 @@ class FontFace:
:param filenames: from which file is this face
:type filenames: List[str]
"""
def __init__(self,
glyphs = {}):

def __init__(self, glyphs={}):
self.glyphs = glyphs
# lists have to be initialized in __init__
# to be attributes per instance.

@ -139,13 +143,15 @@ class FontFace:
self.filepaths = []
self.unit_factor = 1.0

class Font:
"""Font holds the faces and various metadata for a font

:param faces: dictionary of faces, defaults to ``Dict[str, FontFace]``
:type faces: Dict[str, FontFace]
"""
def __init__(self, faces = Dict[str, FontFace]):

def __init__(self, faces=Dict[str, FontFace]):
self.faces = faces
@ -156,14 +162,18 @@ def register_font(font_name, face_name, glyphs_in_fontfile, filepath):
fonts[font_name].faces[face_name] = FontFace({})
fonts[font_name].faces[face_name].glyphs_in_fontfile = glyphs_in_fontfile
else:
fonts[font_name].faces[face_name].glyphs_in_fontfile = \
list(set(fonts[font_name].faces[face_name].glyphs_in_fontfile + glyphs_in_fontfile))
fonts[font_name].faces[face_name].glyphs_in_fontfile = list(
set(
fonts[font_name].faces[face_name].glyphs_in_fontfile
+ glyphs_in_fontfile
)
)
if filepath not in fonts[font_name].faces[face_name].filepaths:
fonts[font_name].faces[face_name].filepaths.append(filepath)

def add_glyph(font_name, face_name, glyph_id, glyph_object):
""" add_glyph adds a glyph to a FontFace
"""add_glyph adds a glyph to a FontFace
it creates the :class:`Font` and :class:`FontFace` if it does not exist yet

:param font_name: The Font you want to add the glyph to

@ -187,8 +197,9 @@ def add_glyph(font_name, face_name, glyph_id, glyph_object):
if glyph_id not in fonts[font_name].faces[face_name].loaded_glyphs:
fonts[font_name].faces[face_name].loaded_glyphs.append(glyph_id)

def get_glyph(font_name, face_name, glyph_id, alternate=0):
""" add_glyph adds a glyph to a FontFace
"""add_glyph adds a glyph to a FontFace
it creates the :class:`Font` and :class:`FontFace` if it does not exist yet

:param font_name: The :class:`Font` you want to get the glyph from

@ -222,11 +233,14 @@ def get_glyph(font_name, face_name, glyph_id, alternate=0):

return fonts[font_name].faces[face_name].glyphs.get(glyph_id)[alternate]

def test_glyphs_availability(font_name, face_name, text):
# maybe there is NOTHING yet
if not fonts.keys().__contains__(font_name) or \
fonts[font_name].faces.get(face_name) == None:
return "", "", text # <loaded>, <missing>, <maybe>
if (
not fonts.keys().__contains__(font_name)
or fonts[font_name].faces.get(face_name) == None
):
return "", "", text  # <loaded>, <missing>, <maybe>

loaded = []
missing = []
@ -240,35 +254,44 @@ def test_glyphs_availability(font_name, face_name, text):
if c not in fonts[font_name].faces[face_name].missing_glyphs:
fonts[font_name].faces[face_name].missing_glyphs.append(c)
missing.append(c)
return ''.join(loaded), ''.join(missing), ''.join(maybe), fonts[font_name].faces[face_name].filepaths
return (
"".join(loaded),
"".join(missing),
"".join(maybe),
fonts[font_name].faces[face_name].filepaths,
)

def get_loaded_fonts():
return fonts.keys()

def get_loaded_fonts_and_faces():
out = []
for f in fonts.keys():
for ff in fonts[f].faces.keys():
out.append([f,ff])
out.append([f, ff])
return out

MISSING_FONT = 0
MISSING_FACE = 1

def test_availability(font_name, face_name, text):
if not fonts.keys().__contains__(font_name):
return MISSING_FONT
if fonts[font_name].faces.get(face_name) == None:
return MISSING_FACE
loaded, missing, maybe, filepaths = test_glyphs_availability(font_name,
face_name,
text)
loaded, missing, maybe, filepaths = test_glyphs_availability(
font_name, face_name, text
)
return {
"loaded": loaded,
"missing": missing,
"maybe": maybe,
"filepaths": filepaths,
}
"loaded": loaded,
"missing": missing,
"maybe": maybe,
"filepaths": filepaths,
}

# holds all fonts