from __future__ import print_function
import os
import glob
import re
import json
from pprint import pprint
import comtypes.client as ct
from comtypes import COMError
### Credits:
# oiehot gist
# https://gist.github.com/oiehot/a63782138af5508481f247f40aa0d5f1
# also adam pletcher & evan mccall!
### Setting things up
# Crazy Photoshop API presets
# Kinda documented -> search for "com.adobe.photoshop API"
PS_NO_DIALOGS = 3  # from enum PsDialogModes
PS_DONTSAVE = 2  # psDoNotSaveChanges
# inherited from ElementPlacement
# makes sense :thinking:
PLACEAFTER = 1
PLACEATEND = 3
PLACEBEFORE = 2
### Grab the settings from the JSON
# It requires a JSON file with the following structure.
# Note the forward slashes and the trailing slash on root_dir!
# {
#     "root_dir": "c:/tmp/test/",
#     "output_file": "c:/tmp/save_psb_file.psb",
#     "accessory_tags": "Body_Side, Grille",
#     "visible_layers": "Touring_B_593, Wheel_Touring_Straight",
#     "include_lights": true,
#     "include_wheels": false
# }
# TODO: Write a validator
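# A minimal sketch of the validator the TODO above asks for. Illustration
# only and not wired into the loading code below: it simply checks that
# every expected key is present in the parsed settings dict.
def validate_settings(settings):
    required = ['root_dir', 'output_file', 'accessory_tags',
                'visible_layers', 'include_lights', 'include_wheels']
    missing = [key for key in required if key not in settings]
    if missing:
        raise KeyError('settings.json is missing: {}'.format(', '.join(missing)))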
try:
    with open('settings.json') as json_data_file:
        data = json.load(json_data_file)
        print('json loaded')
        ROOT_PATH = data['root_dir']
        LIGHTS_INCLUDED = data['include_lights']
        WHEELS_INCLUDED = data['include_wheels']
        OUTPUT_FILE = data['output_file']
        GROUPED_LAYERS = [x.strip() for x in data['accessory_tags'].split(',')]
        VISIBLE_LAYERS = [x.strip() for x in data['visible_layers'].split(',')]
except Exception as err:
    print("E: Something bad happened ({}). Please check your JSON file!".format(err))
    raise
# this has to be in reverse order!
DIR_TEMPLATE = ['Trims', 'Wheels', 'Accessories', 'Lights']
IGNORED_DIRS = ['Misc', 'Wheel_Merged']
if not LIGHTS_INCLUDED:
    IGNORED_DIRS.append('Lights')
if not WHEELS_INCLUDED:
    IGNORED_DIRS.append('Wheels')
### Load a Photoshop Object
try:  # to load Photoshop or get a reference to an already running Photoshop app
    psApp = ct.CreateObject('Photoshop.Application')
    # CreateObject: runs from the command line and opens Photoshop
    # GetActiveObject: uses an already open instance of Photoshop
except COMError as cerror:
    # Photoshop is missing or busy
    print("Woah! Something bad happened! {}".format(cerror))
    raise
# Set units to pixels
psApp.Preferences.RulerUnits = 1
def validate_folder_structure():
    pass  # whistling
def get_image_size(path):
    """ Open an image and return its size
    Args:
        path (str): The exact image file path including the extension
    Returns:
        size (tuple): The size of the image in pixels (width, height)
    """
    doc = psApp.Open(path)
    size = (doc.width, doc.height)
    doc.Close(PS_DONTSAVE)  # 2 (psDoNotSaveChanges)
    return size
def create_main_image(width=6000, height=3374, res=72):
    """ Initialise a new Photoshop instance and
    create an image with specific dimensions
    and resolution.
    Args:
        width (int): Width of the Photoshop Document
        height (int): Height of the Photoshop Document
        res (int): Resolution in dpi
    Returns:
        (obj): The Photoshop Document object
    """
    try:
        return psApp.Documents.Add(width, height, res, "new-psb-test", 2, 1, 1)
    except COMError as cerror:
        print("E: Something bad happened: {}".format(cerror))
        raise
def copy_file_contents_to_clipboard(path):
    """ Open a Photoshop file and copy its contents to the clipboard
    Args:
        path (str): the exact file path
    """
    doc = psApp.Open(path)
    doc.layers[0].Copy()
    doc.Close(PS_DONTSAVE)  # 2 (psDoNotSaveChanges)
def create_layer_from_file(doc, layer_name, layer_set, path):
    """ Create a new Layer from a file, nested under a LayerSet
    Args:
        doc (obj): The working Photoshop Document
        layer_name (str): The given name for the Layer
        layer_set (obj): the LayerSet object that the Layer is nested under
        path (str): the full path of the file (including the extension)
    Returns:
        layer (obj): The newly created Art Layer object
    """
    # copy_file_contents_to_clipboard(path)
    psApp.activeDocument = doc
    # layer = doc.artLayers.add()  # Create Layer
    layer = layer_set.artLayers.Add()
    layer.name = layer_name  # Rename Layer
    doc.activeLayer = layer  # Select Layer
    # this replacement isn't nice and it shouldn't be done like this
    # TODO: migrate to Python 3 and use pathlib Paths!
    paste_file_as_linked_layer(os.path.realpath(path).replace('\\', '/'))
    # This could take an optional parameter so you can choose between
    # opening and pasting a file as a layer (the two commented lines)
    # or just pasting linked layers (that's why it should be kept as two
    # separate functions).
    # doc.Paste()  # Paste into
    return layer
def get_layer_by_name(doc, name):
    """ Search for a layer in the Photoshop Document by its name
    Args:
        doc (obj): The Photoshop Document Object
        name (str): The name of the (art) Layer to look up
    Returns:
        obj: The Art Layer Object found
    """
    for layer in doc.artLayers:
        if name in layer.name:
            return layer
def get_group_by_name(doc, name):
    """ Search for a layer set (group) in the Photoshop Document by its name
    Args:
        doc (obj): The Photoshop Document Object
        name (str): The name of the LayerSet (group) to look up
    Returns:
        obj: The LayerSet Object found
    """
    for layer_set in doc.LayerSets:
        if name in layer_set.name:
            return layer_set
def create_group_named(doc, layer_set, under=''):
    """ Create a new LayerSet (aka Group) in the Photoshop
    document (doc).
    Args:
        doc (obj): The Photoshop Document Instance
        layer_set (str): The name of the new LayerSet
        under (obj): An optional parent Group Object Instance
            (e.g. if you want a subgroup under Lights, pass that
            group object as under)
    Returns:
        new_layer_set (obj): The LayerSet (Group) Object
    """
    if not under:  # add a top level group
        new_layer_set = doc.layerSets.Add()
    else:  # add a subgroup
        new_layer_set = under.LayerSets.Add()
    new_layer_set.name = layer_set
    return new_layer_set
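# Usage sketch (the group names below are illustrative, not anything the
# script requires): create a top level group, then a subgroup nested under it.
#   lights = create_group_named(new_doc, 'Lights')
#   create_group_named(new_doc, 'Brake_Lights', under=lights)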
def dir2json(path):
    """ Read a directory and save it in a dictionary
    structure (which is what JSON maps to in Python)
    Args:
        path (str): The full path to be parsed
    Returns:
        d (dict): The dictionary which mirrors the directory structure
    """
    # if not os.path.isdir(path) or not os.access(path, os.R_OK):
    #     raise Exception('E: Bad things happened! Check directory permissions.')
    if os.path.isdir(path):
        d = {}
        for name in os.listdir(path):
            d[name] = dir2json(os.path.join(path, name))
    else:
        d = os.path.getsize(path)
    return d
def get_files(path):
    """ Get all the files in a directory
    Args:
        path (str): The full path to be parsed
    Returns:
        files_list[0] (list): The files in the top level of the directory
    """
    files_list = []
    for (_, _, files) in os.walk(path):
        files_list.append(files)
    return files_list[0]  # only top level files
def get_subdirs(path):
    """ Return the subdirs of the path if any exist
    Args:
        path (str): the full path for which to search for subdirs
    Returns:
        list: the subdirs of the path
    """
    sub_dir = next(os.walk(path))[1]
    if sub_dir:  # only if we have some
        return sub_dir
def search_file(main_path, pattern, min=True):
    """ Implement a UNIX-style pattern match of a file pattern
    in the given path. If the option min is enabled (as it is
    by default) it just returns the first file. Otherwise it returns
    a list with all the files matched.
    Args:
        main_path (str): The path on which to perform the search
        pattern (str): UNIX-style file pattern (fnmatch syntax)
        min (bool): Return only the first match when True
    Returns:
        str or list: If min is enabled return just the first file
        matched, otherwise a list of all the matched files
    """
    from fnmatch import fnmatch
    file_list = [
        file for file in os.listdir(main_path) if fnmatch(file, pattern)
    ]
    if min:
        return main_path + file_list[0]
    return [main_path + file for file in file_list]
def match_filename_partially(filename, pattern):
    """ Look for a pattern in a filename
    Args:
        filename (str): The filename to check
        pattern (list): The list of patterns to look for
    Returns:
        boolean: True if any pattern matches, otherwise False
    """
    return any(item in filename for item in pattern)
def import_all_pngs_as_layers(doc, layer_set, path):
    ''' Import all the pngs in the path as layers under layer_set (group)
    Args:
        doc (obj): The Photoshop Document Instance
        layer_set (obj): The LayerSet to nest the new layers under
        path (str): the directory that holds the png files
    Returns:
        errored_files (list): List of failed file names
    '''
    errored_files = []
    # note: relies on the caller having chdir'd into path first
    for each_file in glob.glob("*.png"):
        try:
            create_layer_from_file(doc, each_file, layer_set,
                                   path + '/' + each_file)
        except Exception as e:
            # files that failed loading into Photoshop and the reason why
            errored_files.append(each_file + ": " + str(e))
    return errored_files
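# Usage sketch (hypothetical arguments, mirroring how do_shit() calls it
# below): chdir into the folder first, then import everything in it.
#   os.chdir(ROOT_PATH + 'Misc')
#   failed = import_all_pngs_as_layers(new_doc, new_doc, ROOT_PATH + 'Misc')
#   if failed:
#       pprint(failed)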
def paste_file_as_linked_layer(path):
    """ Import a file as a Photoshop (smart) linked layer
    Args:
        path (str): The exact path of the image including extension
    Returns:
        whatever executeAction returns (TBC)
    """
    idPlc = psApp.charIDToTypeID("Plc ")
    desc11 = ct.CreateObject("Photoshop.ActionDescriptor")
    idIdnt = psApp.charIDToTypeID("Idnt")
    desc11.putInteger(idIdnt, 2)
    # Open the file (path)
    idnull = psApp.charIDToTypeID("null")
    desc11.putPath(idnull, path)
    # set its type as a linked layer
    idLnkd = psApp.charIDToTypeID("Lnkd")
    desc11.putBoolean(idLnkd, True)
    idFTcs = psApp.charIDToTypeID("FTcs")
    idQCSt = psApp.charIDToTypeID("QCSt")
    idQcsa = psApp.charIDToTypeID("Qcsa")
    desc11.putEnumerated(idFTcs, idQCSt, idQcsa)
    idOfst = psApp.charIDToTypeID("Ofst")
    desc12 = ct.CreateObject('Photoshop.ActionDescriptor')
    idHrzn = psApp.charIDToTypeID("Hrzn")
    idRlt = psApp.charIDToTypeID("#Rlt")
    desc12.putUnitDouble(idHrzn, idRlt, 0)
    idVrtc = psApp.charIDToTypeID("Vrtc")
    idRlt = psApp.charIDToTypeID("#Rlt")
    desc12.putUnitDouble(idVrtc, idRlt, 0)
    idOfst = psApp.charIDToTypeID("Ofst")
    # put the object in an offset space of 0,0
    desc11.putObject(idOfst, idOfst, desc12)
    # the 'return' of the function
    # is the placement of the linked layer
    return psApp.executeAction(idPlc, desc11, PS_NO_DIALOGS)
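# Roughly, the Action Manager soup above rebuilds the descriptor that the
# "Place Linked..." command records, so the file at `path` lands in the
# active document as a linked Smart Object at offset (0, 0). Usage sketch
# (hypothetical file path):
#   paste_file_as_linked_layer('c:/tmp/test/Trims/Touring_B_593.png')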
def collapse_all_groups():
    """ Collapse all the groups in the current document
    """
    desc19 = ct.CreateObject('Photoshop.ActionDescriptor')
    idCollapse = psApp.StringIDToTypeID('collapseAllGroupsEvent')
    psApp.executeAction(idCollapse, desc19, PS_NO_DIALOGS)
def save_as_psb(doc, path):
    """ Save the document in the PSB file format
    Args:
        doc (obj): The Photoshop Document object
        path (str): The exact file path including the extension
    """
    psd_opt = ct.CreateObject('Photoshop.PhotoshopSaveOptions')
    psd_opt.layers = True
    # psd_opt.annotations = False
    # psd_opt.alphaChannels = True
    # psd_opt.spotColors = True
    psd_opt.embedColorProfile = True
    doc.saveAs(path, psd_opt, True)
def cTID(s):
    return psApp.CharIDToTypeID(s)
def sTID(s):
    return psApp.StringIDToTypeID(s)
def _save_as_psb(path):
    desc19 = ct.CreateObject("Photoshop.ActionDescriptor")
    desc20 = ct.CreateObject("Photoshop.ActionDescriptor")
    desc20.putBoolean(sTID('maximizeCompatibility'), True)
    desc19.putObject(cTID('As '), cTID('Pht8'), desc20)
    desc19.putPath(cTID('In '), path)
    desc19.putBoolean(cTID('LwCs'), True)
    psApp.executeAction(cTID('save'), desc19, PS_NO_DIALOGS)
def enable_layer_by_rule(layer, rule):
    # turn the layer back on if its name matches any of the rule patterns
    if any(x in layer.name for x in rule):
        layer.Visible = True
def find_all_items(obj, key, keys=None):
    """ Find all items in a dictionary (it doesn't apply to lists).
    It iterates recursively through keys and subkeys.
    Snippet by: Dolan Antenucci
    Example of use:
        d = {'a': 1, 'b': 2, 'c': {'a': 3, 'd': 4, 'e': {'a': 9, 'b': 3}, 'j': {'c': 4}}}
        for k, v in find_all_items(d, 'a'):
            print("* {} = {} *".format('->'.join(k), v))
    Args:
        obj (dict): the dictionary the search will be performed on
        key (str): the item to search for
    Returns:
        list: a list of (parent keys, value) tuples for every match
    """
    ret = []
    if not keys:
        keys = []
    if key in obj:
        out_keys = keys + [key]
        ret.append((out_keys, obj[key]))
    for k, v in obj.items():
        if isinstance(v, dict):
            found_items = find_all_items(v, key, keys=(keys + [k]))
            ret += found_items
    return ret
def do_shit():
    # Logging the execution time (just for fun [and optimisation])
    import time
    start = time.time()
    # We don't want any Photoshop dialogs during automated execution
    # psApp.displayDialogs = PS_NO_DIALOGS
    # get the image dimensions from any background png
    bg_wh = get_image_size(search_file(ROOT_PATH + 'Misc/', '*background*.png'))
    # which of course is float and Photoshop doesn't like float (!)
    # TODO: make this more consistent
    # looks hacky
    wi = int(bg_wh[0])
    hi = int(bg_wh[1])
    # and create a main document with those dimensions
    global new_doc
    new_doc = create_main_image(wi, hi)
    print("Grabbed the dimensions from background! w: {} h: {}".format(wi, hi))
    # Algorithm (?):
    # ---------
    # Starting from the given top folder structure,
    # traverse downwards and create
    # Photoshop groups named after the folders,
    # import all the png files under each one
    # as linked layers and name them accordingly,
    # then traverse one level deeper
    # and do the same.
    # Import layers from Misc as Background and Shadow
    os.chdir(ROOT_PATH + '/Misc')
    import_all_pngs_as_layers(new_doc, new_doc, ROOT_PATH + '/Misc')
    os.chdir(ROOT_PATH)  # Revert the working path
    duplicate = False
    subdir = False
    ### ATTENTION: Now grab a beer before you start looking at the following!
    ### No, really.
    # TODO: Optimise this thing.
    for root, dd, ff in os.walk('.'):
        path = root.split(os.sep)
        if not os.path.basename(root) == '.':  # ignore parent
            if os.path.dirname(root).replace(".\\", "") in IGNORED_DIRS:
                pass
            elif not os.path.dirname(root) == '.' and not os.path.dirname(
                    root).replace(".\\", "") in IGNORED_DIRS:
                # print('I am a subdir {} of the dir {}'.format(
                #     os.path.basename(root),
                #     os.path.dirname(root).replace(".\\", "")))
                create_group_named(
                    new_doc, os.path.basename(root),
                    new_doc.LayerSets(os.path.dirname(root).replace(".\\", "")))
            elif not os.path.basename(root) in IGNORED_DIRS:
                # print("Create TOP LEVEL layer group named",
                #       os.path.basename(root))
                create_group_named(
                    new_doc,
                    os.path.basename(root))  # Create a group named 'subdir'
        if len(ff) > 1:
            for filename in ff:
                if filename.endswith('.png'):
                    for item in GROUPED_LAYERS:
                        # print(item)
                        # print(item in filename)
                        if item in filename:
                            # print(
                            #     'lets create a group {} and put the layer {} under it in folder {}'
                            #     .format(item, filename, os.path.basename(root)))
                            os.chdir(os.path.realpath(root))
                            try:
                                new_doc.LayerSets(
                                    os.path.basename(root)).LayerSets(item)
                            except:
                                ng = create_group_named(
                                    new_doc, item,
                                    new_doc.LayerSets(os.path.basename(root)))
                                create_layer_from_file(
                                    new_doc, filename, ng,
                                    os.path.realpath(filename))
                            else:
                                # print(new_doc.LayerSets(os.path.basename(root)))
                                create_layer_from_file(
                                    new_doc, filename,
                                    new_doc.LayerSets(
                                        os.path.basename(root)).LayerSets(item),
                                    os.path.realpath(filename))
                            duplicate = True
                            os.chdir(ROOT_PATH)
                    if duplicate:
                        pass
                        duplicate = False
                    else:
                        os.chdir(os.path.realpath(root))
                        # print('Rest files import as layers {} under {}'.format(
                        #     filename, os.path.basename(root)))
                        if os.path.basename(
                                root) in IGNORED_DIRS or os.path.dirname(
                                    root).replace(".\\", "") in IGNORED_DIRS:
                            pass
                        elif not os.path.dirname(root) == '.':
                            # print('layer {} on main group {} on group {}'.format(
                            #     filename,
                            #     os.path.dirname(root).replace(".\\", ""),
                            #     os.path.basename(root)))
                            create_layer_from_file(
                                new_doc, filename,
                                new_doc.LayerSets(
                                    os.path.dirname(root).replace(
                                        ".\\", "")).LayerSets(
                                            os.path.basename(root)),
                                os.path.realpath(filename))
                        else:
                            create_layer_from_file(
                                new_doc, filename,
                                new_doc.LayerSets[os.path.basename(root)],
                                os.path.realpath(filename))
                        os.chdir(ROOT_PATH)
        else:
            pass
    # END OF monstrosity
    ### Cleaning Up!
    # Delete the original background layer
    new_doc.ArtLayers('Background').Delete()
    # Set up the visibility: loop through all layers and groups,
    # turn everything off, then re-enable whatever matches
    # the visibility rules
    for layer in new_doc.artLayers:
        layer.Visible = False
        enable_layer_by_rule(layer, VISIBLE_LAYERS)
    for layer_set in new_doc.LayerSets:
        for n_layer in layer_set.artLayers:
            n_layer.Visible = False
            enable_layer_by_rule(n_layer, VISIBLE_LAYERS)
        for n_layer_set in layer_set.LayerSets:
            for n2_layer in n_layer_set.artLayers:
                n2_layer.Visible = False
                enable_layer_by_rule(n2_layer, VISIBLE_LAYERS)
    # Move layers into the correct structure
    # described in DIR_TEMPLATE:
    # get the group and layer names and save them in a list
    # according to placement preference
    layers = ['shadow', 'background']
    # Search if they exist in the document
    found_layers = [
        get_layer_by_name(new_doc, layer)
        for layer in layers
        if get_layer_by_name(new_doc, layer) is not None
    ]
    found_layer_sets = [
        get_group_by_name(new_doc, layer_set)
        for layer_set in DIR_TEMPLATE
        if get_group_by_name(new_doc, layer_set) is not None
    ]
    # Now go through them and re-order
    for found_layer in found_layers:
        found_layer.Move(new_doc, PLACEBEFORE)
    for found_layer_set in found_layer_sets:
        found_layer_set.Move(new_doc, PLACEAFTER)
    # make it nice and
    collapse_all_groups()
    # be sure everything is saved
    # TODO: add some error handling
    _save_as_psb(OUTPUT_FILE)
    # feel free to close the document now
    # new_doc.Close(PS_DONTSAVE)  # well, it's already saved above
    # just for profiling
    end = time.time()
    print(end - start)
##############
# MAIN STUFF #
##############
def main():
    os.chdir(ROOT_PATH)
    do_shit()
if __name__ == '__main__':
    main()
### NOTES for the future!
# Create new layer or Group:
#   doc.artLayers.add
#   doc.layerSets.add
# Move layers:
#   doc.Layers("Layer 4").Move(doc.Layers("Layer 3"), 4)  # 3: top, 4: bottom
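# The same move can be written with the ElementPlacement constants defined at
# the top of this file; the layer names here are illustrative only:
#   doc.Layers("Shadow").Move(doc.Layers("Background"), PLACEBEFORE)
#   doc.LayerSets("Lights").Move(doc, PLACEATEND)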
{
    "root_dir" : "d:/tests/SH200/",
    "output_file" : "d:/tests/render/test.psb",
    "accessory_tags" : "Underbody, Grilles",
    "visible_layers" : "shadow, background, ch2n3, Wheel_Accessory",
    "include_lights" : "What",
    "include_wheels" : true
}