From 8ee0d01a0feadf22ba84ffe29aa0c8860f7eb066 Mon Sep 17 00:00:00 2001 From: Joe Hultgren Date: Tue, 9 Apr 2019 18:55:45 -0700 Subject: [PATCH 1/2] Add a find_work_files hook for perforce work flows --- hooks/find_work_files.py | 29 +++ info.yml | 49 ++--- python/tk_multi_workfiles/file_finder.py | 222 ++++++++++++----------- 3 files changed, 171 insertions(+), 129 deletions(-) create mode 100644 hooks/find_work_files.py diff --git a/hooks/find_work_files.py b/hooks/find_work_files.py new file mode 100644 index 00000000..80942791 --- /dev/null +++ b/hooks/find_work_files.py @@ -0,0 +1,29 @@ +# Copyright (c) 2015 Shotgun Software Inc. +# +# CONFIDENTIAL AND PROPRIETARY +# +# This work is provided "AS IS" and subject to the Shotgun Pipeline Toolkit +# Source Code License included in this distribution package. See LICENSE. +# By accessing, using, copying or modifying this work you indicate your +# agreement to the Shotgun Pipeline Toolkit Source Code License. All rights +# not expressly granted therein are reserved by Shotgun Software Inc. + + +import sgtk + +HookClass = sgtk.get_hook_baseclass() + + +class FilterWorkFiles(HookClass): + """ + Hook that can be used to filter the list of work files found by the app for the current + Work area + """ + + def execute(self, work_files_paths, work_template, work_fields, skip_fields, skip_missing_optional_keys=True, **kwargs): + """ + Main hook entry point + + """ + # the default implementation just returns the unfiltered list: + return work_files_paths diff --git a/info.yml b/info.yml index f3cd2abd..a1e7e58d 100644 --- a/info.yml +++ b/info.yml @@ -1,11 +1,11 @@ # Copyright (c) 2015 Shotgun Software Inc. -# +# # CONFIDENTIAL AND PROPRIETARY -# -# This work is provided "AS IS" and subject to the Shotgun Pipeline Toolkit +# +# This work is provided "AS IS" and subject to the Shotgun Pipeline Toolkit # Source Code License included in this distribution package. See LICENSE. 
-# By accessing, using, copying or modifying this work you indicate your -# agreement to the Shotgun Pipeline Toolkit Source Code License. All rights +# By accessing, using, copying or modifying this work you indicate your +# agreement to the Shotgun Pipeline Toolkit Source Code License. All rights # not expressly granted therein are reserved by Shotgun Software Inc. # Metadata defining the behaviour and requirements for this app @@ -19,8 +19,8 @@ configuration: launch_at_startup: type: bool description: A flag whether to launch the UI at application startup. - This option is not supported on all engines because of - differences in the way some platforms start up. Currently, only maya + This option is not supported on all engines because of + differences in the way some platforms start up. Currently, only maya nuke and 3ds Max support this option. default_value: False @@ -57,20 +57,25 @@ configuration: hook_scene_operation: type: hook default_value: "{self}/scene_operation_{engine_name}.py" - description: All the application specific scene operations (open, save etc) that + description: All the application specific scene operations (open, save etc) that the app needs to carry out are collected together in this hook. - + hook_copy_file: type: hook default_value: "{self}/copy_file.py" - description: Specify a hook that will be used to copy the file 'source_path' + description: Specify a hook that will be used to copy the file 'source_path' to 'target_path'. 
+ hook_find_work_files: + type: hook + default_value: "{self}/find_work_files.py" + description: Specify a hook that can find extra files in the work area + hook_filter_work_files: type: hook default_value: "{self}/filter_work_files.py" description: Specify a hook that, if needed, can filter the raw list of work files found - for the current work area + for the current work area hook_filter_publishes: type: hook @@ -190,10 +195,10 @@ configuration: version_compare_ignore_fields: type: list description: A list of fields that should be ignored when comparing files to - determine if they are different versions of the same file. If + determine if they are different versions of the same file. If this is left empty then only the version field will be ignored. - Care should be taken when specifying fields to ignore as Toolkit - will expect the version to be unique across files that have + Care should be taken when specifying fields to ignore as Toolkit + will expect the version to be unique across files that have different values for those fields and will error if this isn't the case. values: @@ -228,30 +233,30 @@ configuration: saveas_prefer_version_up: type: bool - description: Control how the save-as command determines the inital name to be used. If - set to True then the name from the current scene will be used and the version - incremented. If False then a new unique name will be used and the version + description: Control how the save-as command determines the inital name to be used. If + set to True then the name from the current scene will be used and the version + incremented. If False then a new unique name will be used and the version reset default_value: False # the Shotgun fields that this app needs in order to operate correctly requires_shotgun_fields: -# More verbose description of this item +# More verbose description of this item display_name: "Shotgun Workfiles" description: "Using this app you can browse, open and save your Work Files and Publishes." 
- + # Required minimum versions for this item to run requires_shotgun_version: requires_core_version: "v0.17.0" requires_engine_version: # the engines that this app can operate in: -supported_engines: +supported_engines: # the frameworks required to run this app frameworks: # We need a version of tk-framework-shotgunutils with a fix for deleting items. - {"name": "tk-framework-shotgunutils", "version": "v5.x.x", "minimum_version": "v5.3.5"} - - {"name": "tk-framework-qtwidgets", "version": "v2.x.x"} - + - {"name": "tk-framework-qtwidgets", "version": "v2.x.x"} + diff --git a/python/tk_multi_workfiles/file_finder.py b/python/tk_multi_workfiles/file_finder.py index 7fc09d62..18b5cb4d 100644 --- a/python/tk_multi_workfiles/file_finder.py +++ b/python/tk_multi_workfiles/file_finder.py @@ -73,13 +73,13 @@ def _generate_name(self, path, template, fields=None): # find out if version is used in the file name: template_name, _ = os.path.splitext(os.path.basename(template.definition)) version_in_name = "{version}" in template_name - + # extract the file name from the path: name, _ = os.path.splitext(os.path.basename(path)) delims_str = "_-. " if version_in_name: - # looks like version is part of the file name so we - # need to isolate it so that we can remove it safely. + # looks like version is part of the file name so we + # need to isolate it so that we can remove it safely. 
# First, find a dummy version whose string representation # doesn't exist in the name string version_key = template.keys["version"] @@ -89,40 +89,40 @@ def _generate_name(self, path, template, fields=None): if test_str not in name: break dummy_version += 1 - + # now use this dummy version and rebuild the path fields["version"] = dummy_version path = template.apply_fields(fields) name, _ = os.path.splitext(os.path.basename(path)) - + # we can now locate the version in the name and remove it dummy_version_str = version_key.str_from_value(dummy_version) - + v_pos = name.find(dummy_version_str) # remove any preceeding 'v' pre_v_str = name[:v_pos].rstrip("v") post_v_str = name[v_pos + len(dummy_version_str):] - - if (pre_v_str and post_v_str - and pre_v_str[-1] in delims_str + + if (pre_v_str and post_v_str + and pre_v_str[-1] in delims_str and post_v_str[0] in delims_str): # only want one delimiter - strip the second one: post_v_str = post_v_str.lstrip(delims_str) - + versionless_name = pre_v_str + post_v_str versionless_name = versionless_name.strip(delims_str) - + if versionless_name: # great - lets use this! 
name = versionless_name - else: - # likely that version is only thing in the name so + else: + # likely that version is only thing in the name so # instead, replace the dummy version with #'s: - zero_version_str = version_key.str_from_value(0) + zero_version_str = version_key.str_from_value(0) new_version_str = "#" * len(zero_version_str) name = name.replace(dummy_version_str, new_version_str) - - return name + + return name def __init__(self, parent=None): """ @@ -136,58 +136,58 @@ def __init__(self, parent=None): def find_files(self, work_template, publish_template, context, filter_file_key=None): """ Find files using the specified context, work and publish templates - + :param work_template: The template to use when searching for work files :param publish_template: The template to use when searching for publish files :param context: The context to search for file with - :param filter_file_key: A unique file 'key' that if specified will limit the returned list of files to just + :param filter_file_key: A unique file 'key' that if specified will limit the returned list of files to just those that match. This 'key' should be generated using the FileItem.build_file_key() method. - :returns: A list of FileItem instances, one for each unique version of a file found in either + :returns: A list of FileItem instances, one for each unique version of a file found in either the work or publish areas """ # can't find anything without a work template! 
if not work_template: return [] - + # determien the publish filters to use from the context: publish_filters = [["entity", "is", context.entity or context.project]] if context.task: publish_filters.append(["task", "is", context.task]) else: publish_filters.append(["task", "is", None]) - + # get the list of valid file extensions if set: - valid_file_extensions = [".%s" % ext if not ext.startswith(".") else ext + valid_file_extensions = [".%s" % ext if not ext.startswith(".") else ext for ext in self._app.get_setting("file_extensions", [])] - + # get list of fields that should be ignored when comparing work files: - version_compare_ignore_fields = self._app.get_setting("version_compare_ignore_fields", []) + version_compare_ignore_fields = self._app.get_setting("version_compare_ignore_fields", []) # find all work & publish files and filter out any that should be ignored: work_files = self._find_work_files(context, work_template, version_compare_ignore_fields) filtered_work_files = self._filter_work_files(work_files, valid_file_extensions) - + published_files = self._find_publishes(publish_filters) - filtered_published_files = self._filter_publishes(published_files, - publish_template, + filtered_published_files = self._filter_publishes(published_files, + publish_template, valid_file_extensions) - + # turn these into FileItem instances: name_map = FileFinder._FileNameMap() - work_file_item_details = self._process_work_files(filtered_work_files, - work_template, - context, - name_map, - version_compare_ignore_fields, + work_file_item_details = self._process_work_files(filtered_work_files, + work_template, + context, + name_map, + version_compare_ignore_fields, filter_file_key) work_file_items = dict([(k, FileItem(**kwargs)) for k, kwargs in work_file_item_details.iteritems()]) - publish_item_details = self._process_publish_files(filtered_published_files, - publish_template, - work_template, - context, - name_map, + publish_item_details = 
self._process_publish_files(filtered_published_files, + publish_template, + work_template, + context, + name_map, version_compare_ignore_fields, filter_file_key) publish_items = dict([(k, FileItem(**kwargs)) for k, kwargs in publish_item_details.iteritems()]) @@ -205,7 +205,7 @@ def find_files(self, work_template, publish_template, context, filter_file_key=N return file_items - def _process_work_files(self, work_files, work_template, context, name_map, version_compare_ignore_fields, + def _process_work_files(self, work_files, work_template, context, name_map, version_compare_ignore_fields, filter_file_key=None): """ :param work_files: A list of dictionaries with file details. @@ -221,12 +221,12 @@ def _process_work_files(self, work_files, work_template, context, name_map, vers :class:`FileItem`. """ files = {} - + for work_file in work_files: - + # always have the work path: work_path = work_file["path"] - + # get fields for work file: wf_fields = work_template.get_fields(work_path) wf_ctx = None @@ -244,15 +244,15 @@ def _process_work_files(self, work_files, work_template, context, name_map, vers if filter_file_key and file_key != filter_file_key: # we can ignore this file completely! 
continue - + # copy common fields from work_file: # file_details = dict([(k, v) for k, v in work_file.iteritems() if k != "path"]) - + # get version from fields if not specified in work file: if not file_details["version"]: file_details["version"] = wf_fields.get("version", 0) - + # if no task try to determine from context or path: if not file_details["task"]: if context.task: @@ -261,11 +261,11 @@ def _process_work_files(self, work_files, work_template, context, name_map, vers # try to create a context from the path and see if that contains a task: wf_ctx = self._app.sgtk.context_from_path(work_path, context) if wf_ctx and wf_ctx.task: - file_details["task"] = wf_ctx.task + file_details["task"] = wf_ctx.task # Add additional fields: # - + # Entity: file_details["entity"] = context.entity @@ -295,26 +295,26 @@ def _process_work_files(self, work_files, work_template, context, name_map, vers "work_path": work_path, "work_details": file_details } - + return files - - def _process_publish_files(self, sg_publishes, publish_template, work_template, context, name_map, + + def _process_publish_files(self, sg_publishes, publish_template, work_template, context, name_map, version_compare_ignore_fields, filter_file_key=None): """ """ files = {} - + # and add in publish details: ctx_fields = context.as_template_fields(work_template) - + for sg_publish in sg_publishes: file_details = {} - + # always have a path: publish_path = sg_publish["path"] - + # determine the work path fields from the publish fields + ctx fields: - # The order is important as it ensures that the user is correct if the + # The order is important as it ensures that the user is correct if the # publish file is in a user sandbox but we also need to be careful not # to overrwrite fields that are being ignored when comparing work files publish_fields = publish_template.get_fields(publish_path) @@ -322,10 +322,10 @@ def _process_publish_files(self, sg_publishes, publish_template, work_template, for k, v in 
ctx_fields.iteritems(): if k not in version_compare_ignore_fields: wp_fields[k] = v - + # build the unique file key for the publish path. All files that share the same key are considered # to be different versions of the same file. - file_key = FileItem.build_file_key(wp_fields, work_template, + file_key = FileItem.build_file_key(wp_fields, work_template, version_compare_ignore_fields) if filter_file_key and file_key != filter_file_key: # we can ignore this file completely! @@ -339,18 +339,18 @@ def _process_publish_files(self, sg_publishes, publish_template, work_template, # unable to generate a work path - this means we are probably missing a field so it's going to # be a problem matching this publish up with its corresponding work file! work_path = "" - + # copy common fields from sg_publish: # file_details = dict([(k, v) for k, v in sg_publish.iteritems() if k != "path"]) - + # get version from fields if not specified in publish file: if file_details["version"] == None: file_details["version"] = publish_fields.get("version", 0) - + # entity file_details["entity"] = context.entity - + # local file modified details: if os.path.exists(publish_path): try: @@ -371,7 +371,7 @@ def _process_publish_files(self, sg_publishes, publish_template, work_template, # add new file item for this publish. Note that we also keep track of the # work path even though we don't know if this publish has a corresponding # work file. 
- files[(file_key, file_details["version"])] = {"key":file_key, + files[(file_key, file_details["version"])] = {"key":file_key, "work_path":work_path, "is_published":True, "publish_path":publish_path, @@ -397,14 +397,14 @@ def _filter_publishes(self, sg_publishes, publish_template, valid_file_extension """ # build list of publishes to send to the filter_publishes hook: hook_publishes = [{"sg_publish":sg_publish} for sg_publish in sg_publishes] - + # execute the hook - this will return a list of filtered publishes: hook_result = self._app.execute_hook("hook_filter_publishes", publishes = hook_publishes) if not isinstance(hook_result, list): - self._app.log_error("hook_filter_publishes returned an unexpected result type '%s' - ignoring!" + self._app.log_error("hook_filter_publishes returned an unexpected result type '%s' - ignoring!" % type(hook_result).__name__) hook_result = [] - + # split back out publishes: published_files = [] for item in hook_result: @@ -419,48 +419,48 @@ def _filter_publishes(self, sg_publishes, publish_template, valid_file_extension path = (sg_publish["path"] or {}).get("local_path") if not path: continue - + # skip file if it doesn't contain a valid file extension: if valid_file_extensions and os.path.splitext(path)[1] not in valid_file_extensions: continue - - # make sure path matches the publish template: + + # make sure path matches the publish template: if not publish_template.validate(path): continue - + # build file details for this publish: file_details = {"path":path} - + # add in details from sg record: file_details["version"] = sg_publish.get("version_number") file_details["name"] = sg_publish.get("name") file_details["task"] = sg_publish.get("task") file_details["publish_description"] = sg_publish.get("description") file_details["thumbnail"] = sg_publish.get("image") - + file_details["published_at"] = sg_publish.get("created_at") file_details["published_by"] = sg_publish.get("created_by", {}) 
file_details["published_file_entity_id"] = sg_publish.get("id") - + # find additional information: editable_info = item.get("editable") if editable_info and isinstance(editable_info, dict): file_details["editable"] = editable_info.get("can_edit", True) file_details["editable_reason"] = editable_info.get("reason", "") - + # append to published files list: - published_files.append(file_details) - + published_files.append(file_details) + return published_files - - + + def _find_work_files(self, context, work_template, version_compare_ignore_fields): """ Find all work files for the specified context and work template. - + :param context: The context to find work files for :param work_template: The work template to match found files against - :param version_compare_ignore_fields: List of fields to ignore when comparing files in order to find + :param version_compare_ignore_fields: List of fields to ignore when comparing files in order to find different versions of the same file :returns: A list of file paths. """ @@ -470,7 +470,7 @@ def _find_work_files(self, context, work_template, version_compare_ignore_fields work_fields = context.as_template_fields(work_template, validate=True) except TankError as e: # could not resolve fields from this context. This typically happens - # when the context object does not have any corresponding objects on + # when the context object does not have any corresponding objects on # disk / in the path cache. In this case, we cannot continue with any # file system resolution, so just exit early insted. return [] @@ -484,7 +484,7 @@ def _find_work_files(self, context, work_template, version_compare_ignore_fields # ensure we find as wide a range of files as possible considering all optional keys. # Note, this may be better as a general change to the paths_from_template method... 
skip_fields += [n for n in work_fields.keys() if work_template.is_optional(n)] - + # Find all versions so skip the 'version' key if it's present and not # already registered in our wildcards: if "version" not in skip_fields: @@ -497,6 +497,14 @@ def _find_work_files(self, context, work_template, version_compare_ignore_fields skip_fields, skip_missing_optional_keys=True ) + work_file_paths = self._app.execute_hook( + "hook_find_work_files", + work_files_paths=work_file_paths, + work_template=work_template, + work_fields=work_fields, + skip_fields=skip_fields, + skip_missing_optional_keys=True + ) return work_file_paths def _filter_work_files(self, work_file_paths, valid_file_extensions): @@ -516,7 +524,7 @@ def _filter_work_files(self, work_file_paths, valid_file_extensions): hook_work_files = [ {"work_file":{"path":path}} for path in work_file_paths ] - + # Execute the hook - this will return a list of filtered paths: hook_result = self._app.execute_hook( "hook_filter_work_files", @@ -527,21 +535,21 @@ def _filter_work_files(self, work_file_paths, valid_file_extensions): "hook_filter_work_files returned an unexpected result type '%s' - ignoring..." % type(hook_result).__name__) hook_result = [] - + # split back out work files: work_files = [] for item in hook_result: work_file = item.get("work_file") if not work_file: continue - + path = work_file.get("path") if not path: continue - + if valid_file_extensions and os.path.splitext(path)[1] not in valid_file_extensions: continue - + # Build the dictionary with details for the filtered path. # Please note that unless the hook added additional details, we only # have a path key in the work_file dictionary. 
@@ -555,15 +563,15 @@ def _filter_work_files(self, work_file_paths, valid_file_extensions): "modified_at": work_file.get("modified_at"), "modified_by": work_file.get("modified_by", {}), } - + # Find additional information: editable_info = item.get("editable") if editable_info and isinstance(editable_info, dict): file_details["editable"] = editable_info.get("can_edit", True) - file_details["editable_reason"] = editable_info.get("reason", "") - + file_details["editable_reason"] = editable_info.get("reason", "") + work_files.append(file_details) - + return work_files class AsyncFileFinder(FileFinder): @@ -617,7 +625,7 @@ def __init__(self, bg_task_manager, parent=None): def shut_down(self): """ """ - # clean up any publish models - not doing this will result in + # clean up any publish models - not doing this will result in # severe instability! for search in self._searches: if search.publish_model: @@ -714,7 +722,7 @@ def _begin_search_for_work_files(self, search, work_area): search.user_work_areas[user_id] = user_work_area # find work files: - find_work_files_task = self._bg_task_manager.add_task(self._task_find_work_files, + find_work_files_task = self._bg_task_manager.add_task(self._task_find_work_files, group=search.id, priority=AsyncFileFinder._FIND_FILES_PRIORITY, task_kwargs = {"environment":user_work_area}) @@ -728,7 +736,7 @@ def _begin_search_for_work_files(self, search, work_area): # build work items: process_work_items_task = self._bg_task_manager.add_task(self._task_process_work_items, - group=search.id, + group=search.id, priority=AsyncFileFinder._FIND_FILES_PRIORITY, upstream_task_ids = [filter_work_files_task], task_kwargs = {"environment":user_work_area, @@ -811,7 +819,7 @@ def _on_background_task_completed(self, task_id, search_id, result): search.aborted = True return - # we have successfully constructed a work area that we can + # we have successfully constructed a work area that we can # use for the next stage so begin searching for work files: 
self._begin_search_for_work_files(search, work_area) # and also add a task to process cached publishes: @@ -871,7 +879,7 @@ def _on_background_search_finished(self, search_id): # be emitted multiple times for a single search so we need to check # that the search has actually finished! if search.users and not search.aborted: - if (search.find_publishes_tasks or search.find_work_files_tasks + if (search.find_publishes_tasks or search.find_work_files_tasks or search.load_cached_pubs_task or not search.publish_model_refreshed ): # we still have work outstanding! @@ -959,20 +967,20 @@ def _task_filter_publishes(self, sg_publishes, environment, **kwargs): created_at = datetime.fromtimestamp(created_at, sg_timezone.LocalTimezone()) sg_publish["created_at"] = created_at - filtered_publishes = self._filter_publishes(sg_publishes, - environment.publish_template, + filtered_publishes = self._filter_publishes(sg_publishes, + environment.publish_template, environment.valid_file_extensions) - return {"sg_publishes":filtered_publishes} + return {"sg_publishes":filtered_publishes} def _task_process_publish_items(self, sg_publishes, environment, name_map, **kwargs): """ """ publish_items = {} - if (sg_publishes and environment and environment.publish_template + if (sg_publishes and environment and environment.publish_template and environment.work_template and environment.context and name_map): - publish_items = self._process_publish_files(sg_publishes, - environment.publish_template, - environment.work_template, + publish_items = self._process_publish_files(sg_publishes, + environment.publish_template, + environment.work_template, environment.context, name_map, environment.version_compare_ignore_fields) @@ -983,8 +991,8 @@ def _task_find_work_files(self, environment, **kwargs): """ work_files = [] if (environment and environment.context and environment.work_template): - work_files = self._find_work_files(environment.context, - environment.work_template, + work_files = 
self._find_work_files(environment.context, + environment.work_template, environment.version_compare_ignore_fields) return {"work_files":work_files} @@ -1001,10 +1009,10 @@ def _task_process_work_items(self, work_files, environment, name_map, **kwargs): """ """ work_items = {} - if (work_files and environment and environment.work_template + if (work_files and environment and environment.work_template and environment.context and name_map): - work_items = self._process_work_files(work_files, - environment.work_template, + work_items = self._process_work_files(work_files, + environment.work_template, environment.context, name_map, environment.version_compare_ignore_fields) From 4fc231f4b37d648a90403d20e9e10e7fe4a20a24 Mon Sep 17 00:00:00 2001 From: Joe Hultgren Date: Sat, 15 Feb 2020 15:03:58 -0800 Subject: [PATCH 2/2] update class name and description --- hooks/find_work_files.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/hooks/find_work_files.py b/hooks/find_work_files.py index 80942791..cedeb7e6 100644 --- a/hooks/find_work_files.py +++ b/hooks/find_work_files.py @@ -14,10 +14,10 @@ HookClass = sgtk.get_hook_baseclass() -class FilterWorkFiles(HookClass): +class FindWorkFiles(HookClass): """ - Hook that can be used to filter the list of work files found by the app for the current - Work area + Hook that can be used to provide more ways to find files to open. One such option would be to list files from perforce that + aren't synced on the user's machine yet. """ def execute(self, work_files_paths, work_template, work_fields, skip_fields, skip_missing_optional_keys=True, **kwargs):